Initial import of version 1.0.2

This commit is contained in:
Benjamin Dauvergne 2015-02-13 17:35:02 +01:00
commit 718cee6452
121 changed files with 11615 additions and 0 deletions

22
PKG-INFO Normal file
View File

@ -0,0 +1,22 @@
Metadata-Version: 1.1
Name: South
Version: 1.0.2
Summary: South: Migrations for Django
Home-page: http://south.aeracode.org/
Author: Andrew Godwin & Andy McCurdy
Author-email: south@aeracode.org
License: UNKNOWN
Download-URL: http://south.aeracode.org/wiki/Download
Description: South is an intelligent database migrations library for the Django web framework. It is database-independent and DVCS-friendly, as well as a whole host of other features.
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Framework :: Django
Classifier: Intended Audience :: Developers
Classifier: Intended Audience :: System Administrators
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Operating System :: OS Independent
Classifier: Topic :: Software Development
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7

6
README Normal file
View File

@ -0,0 +1,6 @@
This is South, a Django application to provide schema and data migrations.
Documentation on South is currently available on our project site;
you can find it at http://south.aeracode.org/docs/
South is compatible with Django 1.2 and higher, and Python 2.6 and higher.

2
setup.cfg Normal file
View File

@ -0,0 +1,2 @@
[wheel]
universal = 1

65
setup.py Executable file
View File

@ -0,0 +1,65 @@
#!/usr/bin/env python
# Packaging script for the South distribution.
#
# Use setuptools if we can; fall back to plain distutils otherwise.
try:
    # Fixed: the correct import is ``from setuptools import setup``.
    # The previous ``from setuptools.core import setup`` named a module that
    # does not exist, so the ImportError fallback below always ran and
    # setuptools was never actually used.
    from setuptools import setup
except ImportError:
    from distutils.core import setup

from south import __version__

setup(
    name='South',
    version=__version__,
    description='South: Migrations for Django',
    long_description='South is an intelligent database migrations library for the Django web framework. It is database-independent and DVCS-friendly, as well as a whole host of other features.',
    author='Andrew Godwin & Andy McCurdy',
    author_email='south@aeracode.org',
    url='http://south.aeracode.org/',
    download_url='http://south.aeracode.org/wiki/Download',
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Framework :: Django",
        "Intended Audience :: Developers",
        # Fixed: removed a duplicated "System Administrators" entry.
        "Intended Audience :: System Administrators",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
        "Topic :: Software Development",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
    ],
    packages=[
        'south',
        'south.creator',
        'south.db',
        'south.management',
        'south.introspection_plugins',
        'south.hacks',
        'south.migration',
        'south.tests',
        'south.db.sql_server',
        'south.management.commands',
        'south.tests.circular_a',
        'south.tests.emptyapp',
        'south.tests.deps_a',
        'south.tests.fakeapp',
        'south.tests.brokenapp',
        'south.tests.circular_b',
        'south.tests.otherfakeapp',
        'south.tests.deps_c',
        'south.tests.deps_b',
        'south.tests.non_managed',
        'south.tests.circular_a.migrations',
        'south.tests.emptyapp.migrations',
        'south.tests.deps_a.migrations',
        'south.tests.fakeapp.migrations',
        'south.tests.brokenapp.migrations',
        'south.tests.circular_b.migrations',
        'south.tests.otherfakeapp.migrations',
        'south.tests.deps_c.migrations',
        'south.tests.deps_b.migrations',
        'south.tests.non_managed.migrations',
        'south.utils',
    ],
)

9
south/__init__.py Normal file
View File

@ -0,0 +1,9 @@
"""
South - Useable migrations for Django apps
"""
__version__ = "1.0.2"
__authors__ = [
"Andrew Godwin <andrew@aeracode.org>",
"Andy McCurdy <andy@andymccurdy.com>"
]

View File

@ -0,0 +1,5 @@
"""
The creator module is responsible for making new migration files, either
as blank templates or autodetecting changes. It contains code that used to
all be in startmigration.py.
"""

559
south/creator/actions.py Normal file
View File

@ -0,0 +1,559 @@
"""
Actions - things like 'a model was removed' or 'a field was changed'.
Each one has a class, which can take the action description and insert code
blocks into the forwards() and backwards() methods, in the right place.
"""
from __future__ import print_function
import sys
from django.db.models.fields.related import RECURSIVE_RELATIONSHIP_CONSTANT
from django.db.models.fields import FieldDoesNotExist, NOT_PROVIDED, CharField, TextField
from south.modelsinspector import value_clean
from south.creator.freezer import remove_useless_attributes, model_key
from south.utils import datetime_utils
from south.utils.py3 import raw_input
class Action(object):
    """
    Generic base Action class. Contains utility methods for inserting into
    the forwards() and backwards() method lists.
    """

    # Subclasses flip these to control whether their generated code goes at
    # the front (True) or the back (False) of the method body lists.
    prepend_forwards = False
    prepend_backwards = False

    def forwards_code(self):
        raise NotImplementedError

    def backwards_code(self):
        raise NotImplementedError

    def add_forwards(self, forwards):
        "Inserts this action's forwards snippet into the given list."
        snippet = self.forwards_code()
        if self.prepend_forwards:
            forwards.insert(0, snippet)
        else:
            forwards.append(snippet)

    def add_backwards(self, backwards):
        "Inserts this action's backwards snippet into the given list."
        snippet = self.backwards_code()
        if self.prepend_backwards:
            backwards.insert(0, snippet)
        else:
            backwards.append(snippet)

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        raise NotImplementedError

    @classmethod
    def triples_to_defs(cls, fields):
        # Rewrite each (class, args, kwargs) triple into its definition
        # string, mutating the given dict in place and returning it.
        for name in list(fields):
            fields[name] = cls.triple_to_def(fields[name])
        return fields

    @classmethod
    def triple_to_def(cls, triple):
        "Turns a single triple into a definition."
        path, args, kwargs = triple
        call_args = list(args) + ["%s=%s" % (key, value) for key, value in kwargs.items()]
        return "self.gf(%r)(%s)" % (path, ", ".join(call_args))
class AddModel(Action):
    """
    Addition of a model. Takes the Model subclass that is being created.

    Emits db.create_table(...) plus a create-signal send in forwards(), and
    the matching db.delete_table(...) in backwards().
    """

    # %-substituted with model_name/table_name/app_label/field_defs below.
    FORWARDS_TEMPLATE = '''
        # Adding model '%(model_name)s'
        db.create_table(%(table_name)r, (
            %(field_defs)s
        ))
        db.send_create_signal(%(app_label)r, [%(model_name)r])'''[1:] + "\n"

    BACKWARDS_TEMPLATE = '''
        # Deleting model '%(model_name)s'
        db.delete_table(%(table_name)r)'''[1:] + "\n"

    def __init__(self, model, model_def):
        # model: the Django model class being added.
        # model_def: dict mapping field name -> frozen (class, args, kwargs) triple.
        self.model = model
        self.model_def = model_def

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        return " + Added model %s.%s" % (
            self.model._meta.app_label,
            self.model._meta.object_name,
        )

    def forwards_code(self):
        "Produces the code snippet that gets put into forwards()"
        # Render each field as a "(name, self.gf(...)(...))" tuple entry for
        # the create_table call; triples_to_defs comes from Action.
        field_defs = ",\n ".join([
            "(%r, %s)" % (name, defn) for name, defn
            in self.triples_to_defs(self.model_def).items()
        ]) + ","
        return self.FORWARDS_TEMPLATE % {
            "model_name": self.model._meta.object_name,
            "table_name": self.model._meta.db_table,
            "app_label": self.model._meta.app_label,
            "field_defs": field_defs,
        }

    def backwards_code(self):
        "Produces the code snippet that gets put into backwards()"
        return self.BACKWARDS_TEMPLATE % {
            "model_name": self.model._meta.object_name,
            "table_name": self.model._meta.db_table,
        }
class DeleteModel(AddModel):
    """
    Deletion of a model. Takes the Model subclass that is being created.
    """

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        meta = self.model._meta
        return " - Deleted model %s.%s" % (meta.app_label, meta.object_name)

    def forwards_code(self):
        # Deleting a model forwards is exactly adding it backwards.
        return AddModel.backwards_code(self)

    def backwards_code(self):
        return AddModel.forwards_code(self)
class _NullIssuesField(object):
    """
    A field that might need to ask a question about rogue NULL values.

    Mixin used by the field actions below to interactively resolve the case
    of a NOT NULL column with no default: the user is asked to quit, supply a
    one-off value, or (where applicable) make the backwards migration raise.
    NOTE(review): relies on the mixing class providing ``self.model`` and
    ``self.null_reason`` — confirm against AddField/DeleteField/ChangeField.
    """

    # True in subclasses where the NULL problem affects backwards(); unlocks
    # menu option 3 below.
    issue_with_backward_migration = False
    # Set to True when the user picks option 3; consumed by backwards_code().
    irreversible = False

    IRREVERSIBLE_TEMPLATE = '''
        # User chose to not deal with backwards NULL issues for '%(model_name)s.%(field_name)s'
        raise RuntimeError("Cannot reverse this migration. '%(model_name)s.%(field_name)s' and its values cannot be restored.")
        # The following code is provided here to aid in writing a correct migration'''

    def deal_with_not_null_no_default(self, field, field_def):
        """
        Handles a NOT NULL field with no default, mutating field_def's kwargs
        (index 2 of the triple) with a chosen default where needed.
        """
        # If it's a CharField or TextField that's blank, skip this step.
        if isinstance(field, (CharField, TextField)) and field.blank:
            field_def[2]['default'] = repr("")
            return
        # Oh dear. Ask them what to do.
        print(" ? The field '%s.%s' does not have a default specified, yet is NOT NULL." % (
            self.model._meta.object_name,
            field.name,
        ))
        print(" ? Since you are %s, you MUST specify a default" % self.null_reason)
        print(" ? value to use for existing rows. Would you like to:")
        print(" ? 1. Quit now"+("." if self.issue_with_backward_migration else ", and add a default to the field in models.py" ))
        print(" ? 2. Specify a one-off value to use for existing columns now")
        if self.issue_with_backward_migration:
            print(" ? 3. Disable the backwards migration by raising an exception; you can edit the migration to fix it later")
        # Loop until a valid menu choice is given.
        while True:
            choice = raw_input(" ? Please select a choice: ")
            if choice == "1":
                sys.exit(1)
            elif choice == "2":
                break
            elif choice == "3" and self.issue_with_backward_migration:
                break
            else:
                print(" ! Invalid choice.")
        if choice == "2":
            self.add_one_time_default(field, field_def)
        elif choice == "3":
            self.irreversible = True

    def add_one_time_default(self, field, field_def):
        # OK, they want to pick their own one-time default. Who are we to refuse?
        print(" ? Please enter Python code for your one-off default value.")
        print(" ? The datetime module is available, so you can do e.g. datetime.date.today()")
        while True:
            code = raw_input(" >>> ")
            if not code:
                print(" ! Please enter some code, or 'exit' (with no quotes) to exit.")
            elif code == "exit":
                sys.exit(1)
            else:
                try:
                    # NOTE: eval of developer-typed console input (trusted,
                    # interactive); not exposed to untrusted external data.
                    result = eval(code, {}, {"datetime": datetime_utils})
                except (SyntaxError, NameError) as e:
                    print(" ! Invalid input: %s" % e)
                else:
                    break
        # Right, add the default in.
        field_def[2]['default'] = value_clean(result)

    def irreversable_code(self, field):
        # (Name typo "irreversable" kept for backwards compatibility with callers.)
        # Renders the raise-RuntimeError guard placed before restore code.
        return self.IRREVERSIBLE_TEMPLATE % {
            "model_name": self.model._meta.object_name,
            "table_name": self.model._meta.db_table,
            "field_name": field.name,
            "field_column": field.column,
        }
class AddField(Action, _NullIssuesField):
    """
    Adds a field to a model. Takes a Model class and the field name.
    """

    # Shown in the interactive NULL prompt from _NullIssuesField.
    null_reason = "adding this field"

    FORWARDS_TEMPLATE = '''
        # Adding field '%(model_name)s.%(field_name)s'
        db.add_column(%(table_name)r, %(field_name)r,
                      %(field_def)s,
                      keep_default=False)'''[1:] + "\n"

    BACKWARDS_TEMPLATE = '''
        # Deleting field '%(model_name)s.%(field_name)s'
        db.delete_column(%(table_name)r, %(field_column)r)'''[1:] + "\n"

    def __init__(self, model, field, field_def):
        # model: the model class; field: the live field instance;
        # field_def: the frozen (class, args, kwargs) triple for it.
        self.model = model
        self.field = field
        self.field_def = field_def
        # See if they've made a NOT NULL column but also have no default (far too common)
        is_null = self.field.null
        default = (self.field.default is not None) and (self.field.default is not NOT_PROVIDED)
        if not is_null and not default:
            self.deal_with_not_null_no_default(self.field, self.field_def)

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        return " + Added field %s on %s.%s" % (
            self.field.name,
            self.model._meta.app_label,
            self.model._meta.object_name,
        )

    def forwards_code(self):
        # field_def is rendered as a "self.gf(...)(...)" expression by
        # Action.triple_to_def.
        return self.FORWARDS_TEMPLATE % {
            "model_name": self.model._meta.object_name,
            "table_name": self.model._meta.db_table,
            "field_name": self.field.name,
            "field_column": self.field.column,
            "field_def": self.triple_to_def(self.field_def),
        }

    def backwards_code(self):
        return self.BACKWARDS_TEMPLATE % {
            "model_name": self.model._meta.object_name,
            "table_name": self.model._meta.db_table,
            "field_name": self.field.name,
            "field_column": self.field.column,
        }
class DeleteField(AddField):
    """
    Removes a field from a model. Takes a Model class and the field name.
    """

    null_reason = "removing this field"
    # Restoring a dropped NOT NULL column needs a default, so the NULL
    # problem here affects the backwards migration.
    issue_with_backward_migration = True

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        meta = self.model._meta
        return " - Deleted field %s on %s.%s" % (
            self.field.name,
            meta.app_label,
            meta.object_name,
        )

    def forwards_code(self):
        # Dropping the column is the reverse of adding it.
        return AddField.backwards_code(self)

    def backwards_code(self):
        restore = AddField.forwards_code(self)
        if self.irreversible:
            # User opted out: guard the restore code behind a RuntimeError.
            return self.irreversable_code(self.field) + restore
        return restore
class ChangeField(Action, _NullIssuesField):
    """
    Changes a field's type/options on a model.

    Emits db.alter_column(...) both ways, plus db.rename_column(...) when the
    underlying column name differs between old and new definitions.
    """

    null_reason = "making this field non-nullable"

    # The same alter_column template serves both directions; note there is no
    # [1:] + "\n" suffix here, unlike the other actions' templates.
    FORWARDS_TEMPLATE = BACKWARDS_TEMPLATE = '''
        # Changing field '%(model_name)s.%(field_name)s'
        db.alter_column(%(table_name)r, %(field_column)r, %(field_def)s)'''

    RENAME_TEMPLATE = '''
        # Renaming column for '%(model_name)s.%(field_name)s' to match new field type.
        db.rename_column(%(table_name)r, %(old_column)r, %(new_column)r)'''

    def __init__(self, model, old_field, new_field, old_def, new_def):
        # old_field/new_field: live field instances before/after the change;
        # old_def/new_def: their frozen triples (kwargs may be mutated below).
        self.model = model
        self.old_field = old_field
        self.new_field = new_field
        self.old_def = old_def
        self.new_def = new_def
        # See if they've changed a not-null field to be null
        new_default = (self.new_field.default is not None) and (self.new_field.default is not NOT_PROVIDED)
        old_default = (self.old_field.default is not None) and (self.old_field.default is not NOT_PROVIDED)
        # Nullable -> NOT NULL with no default: forwards needs a one-off value.
        if self.old_field.null and not self.new_field.null and not new_default:
            self.deal_with_not_null_no_default(self.new_field, self.new_def)
        # NOT NULL -> nullable with no old default: backwards needs one instead.
        if not self.old_field.null and self.new_field.null and not old_default:
            self.null_reason = "making this field nullable"
            self.issue_with_backward_migration = True
            self.deal_with_not_null_no_default(self.old_field, self.old_def)

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        return " ~ Changed field %s on %s.%s" % (
            self.new_field.name,
            self.model._meta.app_label,
            self.model._meta.object_name,
        )

    def _code(self, old_field, new_field, new_def):
        """
        Renders the change in one direction: optional rename followed by the
        alter_column call targeting new_field/new_def.
        """
        output = ""
        if self.old_field.column != self.new_field.column:
            output += self.RENAME_TEMPLATE % {
                "model_name": self.model._meta.object_name,
                "table_name": self.model._meta.db_table,
                "field_name": new_field.name,
                "old_column": old_field.column,
                "new_column": new_field.column,
            }
        output += self.FORWARDS_TEMPLATE % {
            "model_name": self.model._meta.object_name,
            "table_name": self.model._meta.db_table,
            "field_name": new_field.name,
            "field_column": new_field.column,
            "field_def": self.triple_to_def(new_def),
        }
        return output

    def forwards_code(self):
        return self._code(self.old_field, self.new_field, self.new_def)

    def backwards_code(self):
        # Backwards swaps old and new; prepend the RuntimeError guard if the
        # user declined to make this reversible.
        change_code = self._code(self.new_field, self.old_field, self.old_def)
        if not self.irreversible:
            return change_code
        else:
            return self.irreversable_code(self.old_field) + change_code
class AddUnique(Action):
    """
    Adds a unique constraint to a model. Takes a Model class and the field names.
    """

    FORWARDS_TEMPLATE = '''
        # Adding unique constraint on '%(model_name)s', fields %(field_names)s
        db.create_unique(%(table_name)r, %(fields)r)'''[1:] + "\n"

    BACKWARDS_TEMPLATE = '''
        # Removing unique constraint on '%(model_name)s', fields %(field_names)s
        db.delete_unique(%(table_name)r, %(fields)r)'''[1:] + "\n"

    # Constraint removal must happen before tables/columns vanish, so the
    # backwards snippet goes at the front of backwards().
    prepend_backwards = True

    def __init__(self, model, fields):
        self.model = model
        self.fields = fields

    def _template_context(self):
        # Substitution dict shared by both templates.
        return {
            "model_name": self.model._meta.object_name,
            "table_name": self.model._meta.db_table,
            "fields": [field.column for field in self.fields],
            "field_names": [field.name for field in self.fields],
        }

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        return " + Added unique constraint for %s on %s.%s" % (
            [field.name for field in self.fields],
            self.model._meta.app_label,
            self.model._meta.object_name,
        )

    def forwards_code(self):
        return self.FORWARDS_TEMPLATE % self._template_context()

    def backwards_code(self):
        return self.BACKWARDS_TEMPLATE % self._template_context()
class DeleteUnique(AddUnique):
    """
    Removes a unique constraint from a model. Takes a Model class and the field names.
    """

    # Opposite ordering from AddUnique: the drop runs first in forwards(),
    # and the re-creation runs in normal order in backwards().
    prepend_forwards = True
    prepend_backwards = False

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        meta = self.model._meta
        return " - Deleted unique constraint for %s on %s.%s" % (
            [field.name for field in self.fields],
            meta.app_label,
            meta.object_name,
        )

    def forwards_code(self):
        return AddUnique.backwards_code(self)

    def backwards_code(self):
        return AddUnique.forwards_code(self)
class AddIndex(AddUnique):
    """
    Adds an index to a model field[s]. Takes a Model class and the field names.

    Reuses AddUnique's constructor, ordering flags and template substitution;
    only the SQL templates and console text differ.
    """

    FORWARDS_TEMPLATE = '''
        # Adding index on '%(model_name)s', fields %(field_names)s
        db.create_index(%(table_name)r, %(fields)r)'''[1:] + "\n"

    BACKWARDS_TEMPLATE = '''
        # Removing index on '%(model_name)s', fields %(field_names)s
        db.delete_index(%(table_name)r, %(fields)r)'''[1:] + "\n"

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        return " + Added index for %s on %s.%s" % (
            [x.name for x in self.fields],
            self.model._meta.app_label,
            self.model._meta.object_name,
        )
class DeleteIndex(AddIndex):
    """
    Deletes an index off a model field[s]. Takes a Model class and the field names.
    """

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        # Fixed: deletions are reported with a "-" marker, consistent with
        # DeleteModel/DeleteField/DeleteUnique/DeleteM2M (was " + Deleted").
        return " - Deleted index for %s on %s.%s" % (
            [x.name for x in self.fields],
            self.model._meta.app_label,
            self.model._meta.object_name,
        )

    def forwards_code(self):
        # Deleting an index forwards is adding one backwards.
        return AddIndex.backwards_code(self)

    def backwards_code(self):
        return AddIndex.forwards_code(self)
class AddM2M(Action):
    """
    Creation of the auto-generated join table for a ManyToManyField.
    Takes the Model class and the M2M field instance.
    """

    FORWARDS_TEMPLATE = '''
        # Adding M2M table for field %(field_name)s on '%(model_name)s'
        m2m_table_name = %(table_name)s
        db.create_table(m2m_table_name, (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            (%(left_field)r, models.ForeignKey(orm[%(left_model_key)r], null=False)),
            (%(right_field)r, models.ForeignKey(orm[%(right_model_key)r], null=False))
        ))
        db.create_unique(m2m_table_name, [%(left_column)r, %(right_column)r])'''[1:] + "\n"

    BACKWARDS_TEMPLATE = '''
        # Removing M2M table for field %(field_name)s on '%(model_name)s'
        db.delete_table(%(table_name)s)'''[1:] + "\n"

    def __init__(self, model, field):
        # model: the model declaring the M2M; field: the ManyToManyField.
        self.model = model
        self.field = field

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        return " + Added M2M table for %s on %s.%s" % (
            self.field.name,
            self.model._meta.app_label,
            self.model._meta.object_name,
        )

    def table_name(self):
        """
        Returns a Python *expression string* (not the name itself) that
        evaluates to the join table name inside the generated migration.
        """
        # This is part of a workaround for the fact that Django uses
        # different shortening for automatically generated m2m table names
        # (as opposed to any explicitly specified table name)
        f = self.field
        explicit = f.db_table
        if explicit:
            return "%r" % explicit
        else:
            # Auto-generated names get shortened at migration run time.
            auto = "%s_%s" % (self.model._meta.db_table, f.name)
            return 'db.shorten_name(%r)' % auto

    def forwards_code(self):
        return self.FORWARDS_TEMPLATE % {
            "model_name": self.model._meta.object_name,
            "field_name": self.field.name,
            "table_name": self.table_name(),
            "left_field": self.field.m2m_column_name()[:-3], # Remove the _id part
            "left_column": self.field.m2m_column_name(),
            "left_model_key": model_key(self.model),
            "right_field": self.field.m2m_reverse_name()[:-3], # Remove the _id part
            "right_column": self.field.m2m_reverse_name(),
            "right_model_key": model_key(self.field.rel.to),
        }

    def backwards_code(self):
        return self.BACKWARDS_TEMPLATE % {
            "model_name": self.model._meta.object_name,
            "field_name": self.field.name,
            "table_name": self.table_name(),
        }
class DeleteM2M(AddM2M):
    """
    Removal of the auto-generated join table for a ManyToManyField.
    Takes the Model class and the M2M field instance.
    """

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        meta = self.model._meta
        return " - Deleted M2M table for %s on %s.%s" % (
            self.field.name,
            meta.app_label,
            meta.object_name,
        )

    def forwards_code(self):
        # Dropping the join table is the reverse of creating it.
        return AddM2M.backwards_code(self)

    def backwards_code(self):
        return AddM2M.forwards_code(self)

506
south/creator/changes.py Normal file
View File

@ -0,0 +1,506 @@
"""
Contains things to detect changes - either using options passed in on the
commandline, or by using autodetection, etc.
"""
from __future__ import print_function
from django.db import models
from django.contrib.contenttypes.generic import GenericRelation
from django.utils.datastructures import SortedDict
from south.creator.freezer import remove_useless_attributes, freeze_apps, model_key
from south.utils import auto_through
from south.utils.py3 import string_types
class BaseChanges(object):
    """
    Base changes class.

    Subclasses represent a source of migration changes (autodetection or the
    command line) and yield ("ActionName", params) pairs; this base holds the
    helpers shared between them.
    """

    def suggest_name(self):
        # Default: no suggested migration name; subclasses may override.
        return ''

    def split_model_def(self, model, model_def):
        """
        Given a model and its model def (a dict of field: triple), returns three
        items: the real fields dict, the Meta dict, and the M2M fields dict.
        """
        real_fields = SortedDict()
        meta = SortedDict()
        m2m_fields = SortedDict()
        for name, triple in model_def.items():
            if name == "Meta":
                # The frozen Meta options dict, not an actual field.
                meta = triple
            elif isinstance(model._meta.get_field_by_name(name)[0], models.ManyToManyField):
                m2m_fields[name] = triple
            else:
                real_fields[name] = triple
        return real_fields, meta, m2m_fields

    def current_model_from_key(self, key):
        "Returns the live model class for an 'app_label.ModelName' key."
        app_label, model_name = key.split(".")
        return models.get_model(app_label, model_name)

    def current_field_from_key(self, key, fieldname):
        "Returns the live field instance for a model key and field name."
        app_label, model_name = key.split(".")
        # Special, for the magical field from order_with_respect_to
        if fieldname == "_order":
            # Synthesise the implicit IntegerField Django adds for ordering.
            field = models.IntegerField()
            field.name = "_order"
            field.attname = "_order"
            field.column = "_order"
            field.default = 0
            return field
        # Otherwise, normal.
        return models.get_model(app_label, model_name)._meta.get_field_by_name(fieldname)[0]
class AutoChanges(BaseChanges):
    """
    Detects changes by 'diffing' two sets of frozen model definitions.
    """

    # Field types we don't generate add/remove field changes for.
    IGNORED_FIELD_TYPES = [
        GenericRelation,
    ]

    def __init__(self, migrations, old_defs, old_orm, new_defs):
        # migrations: the app's Migrations object.
        # old_defs/new_defs: frozen definition dicts keyed "app.Model".
        # old_orm: frozen ORM, also indexable by "app.Model:field".
        self.migrations = migrations
        self.old_defs = old_defs
        self.old_orm = old_orm
        self.new_defs = new_defs

    def suggest_name(self):
        """
        Builds a migration name out of the detected changes, e.g.
        "auto__add_book__del_field_author_age", capped at 70 characters.
        """
        parts = ["auto"]
        for change_name, params in self.get_changes():
            if change_name == "AddModel":
                parts.append("add_%s" % params['model']._meta.object_name.lower())
            elif change_name == "DeleteModel":
                parts.append("del_%s" % params['model']._meta.object_name.lower())
            elif change_name == "AddField":
                parts.append("add_field_%s_%s" % (
                    params['model']._meta.object_name.lower(),
                    params['field'].name,
                ))
            elif change_name == "DeleteField":
                parts.append("del_field_%s_%s" % (
                    params['model']._meta.object_name.lower(),
                    params['field'].name,
                ))
            elif change_name == "ChangeField":
                parts.append("chg_field_%s_%s" % (
                    params['model']._meta.object_name.lower(),
                    params['new_field'].name,
                ))
            elif change_name == "AddUnique":
                parts.append("add_unique_%s_%s" % (
                    params['model']._meta.object_name.lower(),
                    "_".join([x.name for x in params['fields']]),
                ))
            elif change_name == "DeleteUnique":
                parts.append("del_unique_%s_%s" % (
                    params['model']._meta.object_name.lower(),
                    "_".join([x.name for x in params['fields']]),
                ))
            elif change_name == "AddIndex":
                parts.append("add_index_%s_%s" % (
                    params['model']._meta.object_name.lower(),
                    "_".join([x.name for x in params['fields']]),
                ))
            elif change_name == "DeleteIndex":
                parts.append("del_index_%s_%s" % (
                    params['model']._meta.object_name.lower(),
                    "_".join([x.name for x in params['fields']]),
                ))
        return ("__".join(parts))[:70]

    def get_changes(self):
        """
        Returns the difference between the old and new sets of models as a 5-tuple:
        added_models, deleted_models, added_fields, deleted_fields, changed_fields

        NOTE(review): despite the docstring above, this is a generator that
        yields ("ActionName", params) pairs — confirm callers iterate it.
        """
        deleted_models = set()
        # See if anything's vanished
        for key in self.old_defs:
            if key not in self.new_defs:
                # We shouldn't delete it if it was managed=False
                old_fields, old_meta, old_m2ms = self.split_model_def(self.old_orm[key], self.old_defs[key])
                if old_meta.get("managed", "True") != "False":
                    # Alright, delete it.
                    yield ("DeleteModel", {
                        "model": self.old_orm[key],
                        "model_def": old_fields,
                    })
                    # Also make sure we delete any M2Ms it had.
                    for fieldname in old_m2ms:
                        # Only delete its stuff if it wasn't a through=.
                        field = self.old_orm[key + ":" + fieldname]
                        if auto_through(field):
                            yield ("DeleteM2M", {"model": self.old_orm[key], "field": field})
                    # And any index/uniqueness constraints it had
                    for attr, operation in (("unique_together", "DeleteUnique"), ("index_together", "DeleteIndex")):
                        # Frozen Meta values are stored as repr() strings,
                        # hence the eval here (trusted migration files).
                        together = eval(old_meta.get(attr, "[]"))
                        if together:
                            # If it's only a single tuple, make it into the longer one
                            if isinstance(together[0], string_types):
                                together = [together]
                            # For each combination, make an action for it
                            for fields in together:
                                yield (operation, {
                                    "model": self.old_orm[key],
                                    "fields": [self.old_orm[key]._meta.get_field_by_name(x)[0] for x in fields],
                                })
                # We always add it in here so we ignore it later
                deleted_models.add(key)
        # Or appeared
        for key in self.new_defs:
            if key not in self.old_defs:
                # We shouldn't add it if it's managed=False
                new_fields, new_meta, new_m2ms = self.split_model_def(self.current_model_from_key(key), self.new_defs[key])
                if new_meta.get("managed", "True") != "False":
                    yield ("AddModel", {
                        "model": self.current_model_from_key(key),
                        "model_def": new_fields,
                    })
                    # Also make sure we add any M2Ms it has.
                    for fieldname in new_m2ms:
                        # Only create its stuff if it wasn't a through=.
                        field = self.current_field_from_key(key, fieldname)
                        if auto_through(field):
                            yield ("AddM2M", {"model": self.current_model_from_key(key), "field": field})
                    # And any index/uniqueness constraints it has
                    for attr, operation in (("unique_together", "AddUnique"), ("index_together", "AddIndex")):
                        together = eval(new_meta.get(attr, "[]"))
                        if together:
                            # If it's only a single tuple, make it into the longer one
                            if isinstance(together[0], string_types):
                                together = [together]
                            # For each combination, make an action for it
                            for fields in together:
                                yield (operation, {
                                    "model": self.current_model_from_key(key),
                                    "fields": [self.current_model_from_key(key)._meta.get_field_by_name(x)[0] for x in fields],
                                })
        # Now, for every model that's stayed the same, check its fields.
        for key in self.old_defs:
            if key not in deleted_models:
                old_fields, old_meta, old_m2ms = self.split_model_def(self.old_orm[key], self.old_defs[key])
                new_fields, new_meta, new_m2ms = self.split_model_def(self.current_model_from_key(key), self.new_defs[key])
                # Do nothing for models which are now not managed.
                if new_meta.get("managed", "True") == "False":
                    continue
                # Find fields that have vanished.
                for fieldname in old_fields:
                    if fieldname not in new_fields:
                        # Don't do it for any fields we're ignoring
                        field = self.old_orm[key + ":" + fieldname]
                        field_allowed = True
                        for field_type in self.IGNORED_FIELD_TYPES:
                            if isinstance(field, field_type):
                                field_allowed = False
                        if field_allowed:
                            # Looks alright.
                            yield ("DeleteField", {
                                "model": self.old_orm[key],
                                "field": field,
                                "field_def": old_fields[fieldname],
                            })
                # And ones that have appeared
                for fieldname in new_fields:
                    if fieldname not in old_fields:
                        # Don't do it for any fields we're ignoring
                        field = self.current_field_from_key(key, fieldname)
                        field_allowed = True
                        for field_type in self.IGNORED_FIELD_TYPES:
                            if isinstance(field, field_type):
                                field_allowed = False
                        if field_allowed:
                            # Looks alright.
                            yield ("AddField", {
                                "model": self.current_model_from_key(key),
                                "field": field,
                                "field_def": new_fields[fieldname],
                            })
                # Find M2Ms that have vanished
                for fieldname in old_m2ms:
                    if fieldname not in new_m2ms:
                        # Only delete its stuff if it wasn't a through=.
                        field = self.old_orm[key + ":" + fieldname]
                        if auto_through(field):
                            yield ("DeleteM2M", {"model": self.old_orm[key], "field": field})
                # Find M2Ms that have appeared
                for fieldname in new_m2ms:
                    if fieldname not in old_m2ms:
                        # Only create its stuff if it wasn't a through=.
                        field = self.current_field_from_key(key, fieldname)
                        if auto_through(field):
                            yield ("AddM2M", {"model": self.current_model_from_key(key), "field": field})
                # For the ones that exist in both models, see if they were changed
                for fieldname in set(old_fields).intersection(set(new_fields)):
                    # Non-index changes
                    if self.different_attributes(
                            remove_useless_attributes(old_fields[fieldname], True, True),
                            remove_useless_attributes(new_fields[fieldname], True, True)):
                        yield ("ChangeField", {
                            "model": self.current_model_from_key(key),
                            "old_field": self.old_orm[key + ":" + fieldname],
                            "new_field": self.current_field_from_key(key, fieldname),
                            "old_def": old_fields[fieldname],
                            "new_def": new_fields[fieldname],
                        })
                    # Index changes
                    old_field = self.old_orm[key + ":" + fieldname]
                    new_field = self.current_field_from_key(key, fieldname)
                    if not old_field.db_index and new_field.db_index:
                        # They've added an index.
                        yield ("AddIndex", {
                            "model": self.current_model_from_key(key),
                            "fields": [new_field],
                        })
                    if old_field.db_index and not new_field.db_index:
                        # They've removed an index.
                        yield ("DeleteIndex", {
                            "model": self.old_orm[key],
                            "fields": [old_field],
                        })
                    # See if their uniques have changed
                    if old_field.unique != new_field.unique:
                        # Make sure we look at the one explicitly given to see what happened
                        if new_field.unique:
                            yield ("AddUnique", {
                                "model": self.current_model_from_key(key),
                                "fields": [new_field],
                            })
                        else:
                            yield ("DeleteUnique", {
                                "model": self.old_orm[key],
                                "fields": [old_field],
                            })
                # See if there's any M2Ms that have changed.
                for fieldname in set(old_m2ms).intersection(set(new_m2ms)):
                    old_field = self.old_orm[key + ":" + fieldname]
                    new_field = self.current_field_from_key(key, fieldname)
                    # Have they _added_ a through= ?
                    if auto_through(old_field) and not auto_through(new_field):
                        yield ("DeleteM2M", {"model": self.old_orm[key], "field": old_field})
                    # Have they _removed_ a through= ?
                    if not auto_through(old_field) and auto_through(new_field):
                        yield ("AddM2M", {"model": self.current_model_from_key(key), "field": new_field})
                ## See if the {index,unique}_togethers have changed
                for attr, add_operation, del_operation in (("unique_together", "AddUnique", "DeleteUnique"), ("index_together", "AddIndex", "DeleteIndex")):
                    # First, normalise them into lists of sets.
                    old_together = eval(old_meta.get(attr, "[]"))
                    new_together = eval(new_meta.get(attr, "[]"))
                    if old_together and isinstance(old_together[0], string_types):
                        old_together = [old_together]
                    if new_together and isinstance(new_together[0], string_types):
                        new_together = [new_together]
                    # Normalise to hashable tuples so set difference works.
                    old_together = frozenset(tuple(o) for o in old_together)
                    new_together = frozenset(tuple(n) for n in new_together)
                    # See if any appeared or disappeared
                    disappeared = old_together.difference(new_together)
                    appeared = new_together.difference(old_together)
                    for item in disappeared:
                        yield (del_operation, {
                            "model": self.old_orm[key],
                            "fields": [self.old_orm[key + ":" + x] for x in item],
                        })
                    for item in appeared:
                        yield (add_operation, {
                            "model": self.current_model_from_key(key),
                            "fields": [self.current_field_from_key(key, x) for x in item],
                        })

    @classmethod
    def is_triple(cls, triple):
        "Returns whether the argument is a triple."
        return isinstance(triple, (list, tuple)) and len(triple) == 3 and \
            isinstance(triple[0], string_types) and \
            isinstance(triple[1], (list, tuple)) and \
            isinstance(triple[2], dict)

    @classmethod
    def different_attributes(cls, old, new):
        """
        Backwards-compat comparison that ignores orm. on the RHS and not the left
        and which knows django.db.models.fields.CharField = models.CharField.
        Has a whole load of tests in tests/autodetection.py.
        """
        # If they're not triples, just do normal comparison
        if not cls.is_triple(old) or not cls.is_triple(new):
            return old != new
        # Expand them out into parts
        old_field, old_pos, old_kwd = old
        new_field, new_pos, new_kwd = new
        # Copy the positional and keyword arguments so we can compare them and pop off things
        old_pos, new_pos = old_pos[:], new_pos[:]
        old_kwd = dict(old_kwd.items())
        new_kwd = dict(new_kwd.items())
        # Remove comparison of the existence of 'unique', that's done elsewhere.
        # TODO: Make this work for custom fields where unique= means something else?
        if "unique" in old_kwd:
            del old_kwd['unique']
        if "unique" in new_kwd:
            del new_kwd['unique']
        # If the first bit is different, check it's not by dj.db.models...
        if old_field != new_field:
            if old_field.startswith("models.") and (new_field.startswith("django.db.models") \
                    or new_field.startswith("django.contrib.gis")):
                if old_field.split(".")[-1] != new_field.split(".")[-1]:
                    return True
                else:
                    # Remove those fields from the final comparison
                    old_field = new_field = ""
        # If there's a positional argument in the first, and a 'to' in the second,
        # see if they're actually comparable.
        if (old_pos and "to" in new_kwd) and ("orm" in new_kwd['to'] and "orm" not in old_pos[0]):
            # Do special comparison to fix #153
            try:
                if old_pos[0] != new_kwd['to'].split("'")[1].split(".")[1]:
                    return True
            except IndexError:
                pass # Fall back to next comparison
            # Remove those attrs from the final comparison
            old_pos = old_pos[1:]
            del new_kwd['to']
        return old_field != new_field or old_pos != new_pos or old_kwd != new_kwd
class ManualChanges(BaseChanges):
    """
    Detects changes by reading the command line.
    """

    def __init__(self, migrations, added_models, added_fields, added_indexes):
        self.migrations = migrations
        self.added_models = added_models
        self.added_fields = added_fields
        self.added_indexes = added_indexes

    def suggest_name(self):
        "Builds a migration name out of everything that was requested."
        bits = []
        for model_name in self.added_models:
            bits.append('add_model_%s' % model_name)
        for field_name in self.added_fields:
            bits.append('add_field_%s' % field_name)
        for index_name in self.added_indexes:
            bits.append('add_index_%s' % index_name)
        return '_'.join(bits).replace('.', '_')

    def get_changes(self):
        """
        Yields (action name, params dict) pairs for the models, fields and
        indexes requested on the command line.

        Raises ValueError for a field/index description that is not of the
        form 'model.field'.
        """
        # Get the model defs so we can use them for the yield later
        model_defs = freeze_apps([self.migrations.app_label()])
        # Make the model changes
        for model_name in self.added_models:
            model = models.get_model(self.migrations.app_label(), model_name)
            real_fields, meta, m2m_fields = self.split_model_def(model, model_defs[model_key(model)])
            yield ("AddModel", {
                "model": model,
                "model_def": real_fields,
            })
        # And the field changes
        for field_desc in self.added_fields:
            try:
                model_name, field_name = field_desc.split(".")
            except (TypeError, ValueError):
                raise ValueError("%r is not a valid field description." % field_desc)
            model = models.get_model(self.migrations.app_label(), model_name)
            real_fields, meta, m2m_fields = self.split_model_def(model, model_defs[model_key(model)])
            yield ("AddField", {
                "model": model,
                "field": model._meta.get_field_by_name(field_name)[0],
                "field_def": real_fields[field_name],
            })
        # And the indexes
        for field_desc in self.added_indexes:
            try:
                model_name, field_name = field_desc.split(".")
            except (TypeError, ValueError):
                # Bug fix: this used to print() the error and fall through,
                # which then hit an unbound (or stale, from a previous
                # iteration) model_name below. Fail loudly and consistently
                # with the added_fields branch above instead.
                raise ValueError("%r is not a valid field description." % field_desc)
            model = models.get_model(self.migrations.app_label(), model_name)
            yield ("AddIndex", {
                "model": model,
                "fields": [model._meta.get_field_by_name(field_name)[0]],
            })
class InitialChanges(BaseChanges):
    """
    Creates all models; handles --initial.
    """
    def suggest_name(self):
        # Initial migrations are always just called 'initial'.
        return 'initial'

    def __init__(self, migrations):
        self.migrations = migrations

    def get_changes(self):
        """
        Yields (action name, params dict) pairs that create every concrete,
        managed model in the app, plus unique/index constraints and
        auto-created M2M tables.
        """
        # Get the frozen models for this app
        model_defs = freeze_apps([self.migrations.app_label()])

        for model in models.get_models(models.get_app(self.migrations.app_label())):

            # Don't do anything for unmanaged, abstract or proxy models
            if model._meta.abstract or getattr(model._meta, "proxy", False) or not getattr(model._meta, "managed", True):
                continue

            real_fields, meta, m2m_fields = self.split_model_def(model, model_defs[model_key(model)])

            # Firstly, add the main table and fields
            yield ("AddModel", {
                "model": model,
                "model_def": real_fields,
            })

            # Then, add any indexing/uniqueness that's around
            if meta:
                for attr, operation in (("unique_together", "AddUnique"), ("index_together", "AddIndex")):
                    # NOTE: eval() runs text produced by our own freezer
                    # (trusted input), never user-supplied data.
                    together = eval(meta.get(attr, "[]"))
                    if together:
                        # If it's only a single tuple, make it into the longer one
                        if isinstance(together[0], string_types):
                            together = [together]
                        # For each combination, make an action for it
                        for fields in together:
                            yield (operation, {
                                "model": model,
                                "fields": [model._meta.get_field_by_name(x)[0] for x in fields],
                            })

            # Finally, see if there's some M2M action
            for name, triple in m2m_fields.items():
                field = model._meta.get_field_by_name(name)[0]
                # But only if it's not through=foo (#120)
                if field.rel.through:
                    try:
                        # Django 1.1 and below
                        through_model = field.rel.through_model
                    except AttributeError:
                        # Django 1.2
                        through_model = field.rel.through
                # through_model is only bound when rel.through is set; the
                # first operand below short-circuits in the other case.
                if (not field.rel.through) or getattr(through_model._meta, "auto_created", False):
                    yield ("AddM2M", {
                        "model": model,
                        "field": field,
                    })

192
south/creator/freezer.py Normal file
View File

@ -0,0 +1,192 @@
"""
Handles freezing of models into FakeORMs.
"""
from __future__ import print_function
import sys
from django.db import models
from django.db.models.base import ModelBase, Model
from django.contrib.contenttypes.generic import GenericRelation
from south.utils import get_attribute, auto_through
from south import modelsinspector
from south.utils.py3 import string_types
def freeze_apps(apps):
    """
    Takes a list of app labels (or a single label) and returns a dict mapping
    'appname.modelname' keys to each model's frozen (serialisable) form,
    including all cross-app dependencies.

    Exits the process (sys.exit(1)) if any field cannot be introspected.
    """
    if isinstance(apps, string_types):
        apps = [apps]
    frozen_models = set()
    # For each app, add in all its models
    for app in apps:
        for model in models.get_models(models.get_app(app)):
            # Only add if it's not abstract or proxy
            if not model._meta.abstract and not getattr(model._meta, "proxy", False):
                frozen_models.add(model)
    # Now, add all the dependencies
    # (iterate a copy since model_dependencies may grow the set)
    for model in list(frozen_models):
        frozen_models.update(model_dependencies(model))
    # Serialise!
    model_defs = {}
    model_classes = {}
    for model in frozen_models:
        model_defs[model_key(model)] = prep_for_freeze(model)
        model_classes[model_key(model)] = model
    # Check for any custom fields that failed to freeze.
    # (the inspector records un-freezable fields as None values)
    missing_fields = False
    for key, fields in model_defs.items():
        for field_name, value in fields.items():
            if value is None:
                missing_fields = True
                model_class = model_classes[key]
                field_class = model_class._meta.get_field_by_name(field_name)[0]
                print(" ! Cannot freeze field '%s.%s'" % (key, field_name))
                print(" ! (this field has class %s.%s)" % (field_class.__class__.__module__, field_class.__class__.__name__))
    if missing_fields:
        print("")
        print(" ! South cannot introspect some fields; this is probably because they are custom")
        print(" ! fields. If they worked in 0.6 or below, this is because we have removed the")
        print(" ! models parser (it often broke things).")
        print(" ! To fix this, read http://south.aeracode.org/wiki/MyFieldsDontWork")
        sys.exit(1)
    return model_defs
def freeze_apps_to_string(apps):
    "Freezes the given app labels and renders the result as printable source."
    frozen = freeze_apps(apps)
    return pprint_frozen_models(frozen)

###

def model_key(model):
    "For a given model, return 'appname.modelname'."
    meta = model._meta
    return "%s.%s" % (meta.app_label, meta.object_name.lower())
def prep_for_freeze(model):
    """
    Takes a model and returns the ready-to-serialise dict (all you need
    to do is just pretty-print it).

    The dict maps field names to their frozen triples, plus a 'Meta' entry.
    """
    fields = modelsinspector.get_model_fields(model, m2m=True)
    # Remove useless attributes (like 'choices')
    # (safe to reassign values while iterating: no keys are added/removed)
    for name, field in fields.items():
        fields[name] = remove_useless_attributes(field)
    # See if there's a Meta
    fields['Meta'] = remove_useless_meta(modelsinspector.get_model_meta(model))
    # Add in our own special items to track the object name and managed
    fields['Meta']['object_name'] = model._meta.object_name # Special: not eval'able.
    if not getattr(model._meta, "managed", True):
        fields['Meta']['managed'] = repr(model._meta.managed)
    return fields
### Dependency resolvers
def model_dependencies(model, checked_models=None):
    """
    Returns a set of models this one depends on to be defined; things like
    OneToOneFields as ID, ForeignKeys everywhere, etc.

    checked_models is a shared mutable set used to avoid re-visiting models
    across the mutual recursion with field_dependencies.
    """
    depends = set()
    checked_models = checked_models or set()
    # Get deps for each field
    for field in model._meta.fields + model._meta.many_to_many:
        depends.update(field_dependencies(field, checked_models))
    # Add in any non-abstract bases
    for base in model.__bases__:
        if issubclass(base, models.Model) and hasattr(base, '_meta') and not base._meta.abstract:
            depends.add(base)
    # Now recurse
    new_to_check = depends - checked_models
    while new_to_check:
        checked_model = new_to_check.pop()
        if checked_model == model or checked_model in checked_models:
            continue
        checked_models.add(checked_model)
        deps = model_dependencies(checked_model, checked_models)
        # Loop through dependencies...
        for dep in deps:
            # If the new dep is not already checked, add to the queue
            if (dep not in depends) and (dep not in new_to_check) and (dep not in checked_models):
                new_to_check.add(dep)
            depends.add(dep)
    return depends

def field_dependencies(field, checked_models=None):
    """
    Returns the set of models the given field depends on (FK/M2M targets,
    through models, etc.), recursing into each target's own dependencies.
    """
    checked_models = checked_models or set()
    depends = set()
    arg_defs, kwarg_defs = modelsinspector.matching_details(field)
    for attrname, options in arg_defs + list(kwarg_defs.values()):
        if options.get("ignore_if_auto_through", False) and auto_through(field):
            continue
        if options.get("is_value", False):
            value = attrname
        elif attrname == 'rel.through' and hasattr(getattr(field, 'rel', None), 'through_model'):
            # Hack for django 1.1 and below, where the through model is stored
            # in rel.through_model while rel.through stores only the model name.
            value = field.rel.through_model
        else:
            try:
                value = get_attribute(field, attrname)
            except AttributeError:
                if options.get("ignore_missing", False):
                    continue
                raise
        # Normalise instances to their class; skip anything that isn't a model.
        if isinstance(value, Model):
            value = value.__class__
        if not isinstance(value, ModelBase):
            continue
        # Proxy models depend on their concrete base instead.
        if getattr(value._meta, "proxy", False):
            value = value._meta.proxy_for_model
        if value in checked_models:
            continue
        checked_models.add(value)
        depends.add(value)
        depends.update(model_dependencies(value, checked_models))
    return depends
### Prettyprinters
def pprint_frozen_models(models):
    "Pretty-prints a {model_key: fields} mapping as an indented dict literal."
    entries = ",\n ".join(
        "%r: %s" % (key, pprint_fields(field_defs))
        for key, field_defs in sorted(models.items())
    )
    return "{\n %s\n }" % entries

def pprint_fields(fields):
    "Pretty-prints one model's {field_name: definition} mapping."
    entries = ",\n ".join(
        "%r: %r" % (field_name, definition)
        for field_name, definition in sorted(fields.items())
    )
    return "{\n %s\n }" % entries
### Output sanitisers
USELESS_KEYWORDS = ["choices", "help_text", "verbose_name"]
USELESS_DB_KEYWORDS = ["related_name", "default", "blank"] # Important for ORM, not for DB.
INDEX_KEYWORDS = ["db_index"]

def remove_useless_attributes(field, db=False, indexes=False):
    "Removes useless (for database) attributes from the field's defn (in place)."
    # Work out which keyword arguments are irrelevant for this use.
    doomed = list(USELESS_KEYWORDS)
    if db:
        doomed.extend(USELESS_DB_KEYWORDS)
    if indexes:
        doomed.extend(INDEX_KEYWORDS)
    if field:
        kwargs = field[2]
        for keyword in doomed:
            kwargs.pop(keyword, None)
    return field

USELESS_META = ["verbose_name", "verbose_name_plural"]

def remove_useless_meta(meta):
    "Removes useless (for database) attributes from the table's meta (in place)."
    if meta:
        for attr in USELESS_META:
            meta.pop(attr, None)
    return meta

84
south/db/__init__.py Normal file
View File

@ -0,0 +1,84 @@
# Establish the common DatabaseOperations instance, which we call 'db'.
# Much thanks to cmkmrr for a lot of the code base here
#
# This module runs entirely at import time: it inspects the Django settings,
# resolves each configured database to a South backend module, and exposes
# 'dbs' (alias -> DatabaseOperations) plus the legacy default 'db'.

from django.conf import settings
import sys

# A few aliases, because there's FQMNs now
# (maps Django ENGINE paths to the South backend module name under south.db)
engine_modules = {
    'django.db.backends.postgresql_psycopg2': 'postgresql_psycopg2',
    'django.db.backends.sqlite3': 'sqlite3',
    'django.db.backends.mysql': 'mysql',
    'mysql_oursql.standard': 'mysql',
    'django.db.backends.oracle': 'oracle',
    'sql_server.pyodbc': 'sql_server.pyodbc', #django-pyodbc-azure
    'django_pyodbc': 'sql_server.pyodbc', #django-pyodbc
    'sqlserver_ado': 'sql_server.pyodbc', #django-mssql
    'firebird': 'firebird', #django-firebird
    'django.contrib.gis.db.backends.postgis': 'postgresql_psycopg2',
    'django.contrib.gis.db.backends.spatialite': 'sqlite3',
    'django.contrib.gis.db.backends.mysql': 'mysql',
    'django.contrib.gis.db.backends.oracle': 'oracle',
    'doj.backends.zxjdbc.postgresql': 'postgresql_psycopg2', #django-jython
    'doj.backends.zxjdbc.mysql': 'mysql', #django-jython
    'doj.backends.zxjdbc.oracle': 'oracle', #django-jython
    'mysql.connector.django': 'mysql', # MySQL Connector/Python
}

# First, work out if we're multi-db or not, and which databases we have
try:
    from django.db import DEFAULT_DB_ALIAS
except ImportError:
    #### 1.1 or below ####
    # We'll 'fake' multi-db; set the default alias
    DEFAULT_DB_ALIAS = 'default'
    # SOUTH_DATABASE_ADAPTER is an optional override if you have a different module
    engine = getattr(settings, "SOUTH_DATABASE_ADAPTER", "south.db.%s" % settings.DATABASE_ENGINE)
    # And then, we have one database with one engine
    db_engines = {DEFAULT_DB_ALIAS: engine}
else:
    #### 1.2 or above ####
    # Loop over the defined databases, gathering up their engines
    db_engines = dict([
        # Note we check to see if contrib.gis has overridden us.
        (alias, "south.db.%s" % engine_modules[db_settings['ENGINE']])
        for alias, db_settings in settings.DATABASES.items()
        if db_settings['ENGINE'] in engine_modules
    ])
    # Update with any overrides
    db_engines.update(getattr(settings, "SOUTH_DATABASE_ADAPTERS", {}))
    # Check there's no None engines, or...
    # (an explicit None in SOUTH_DATABASE_ADAPTERS means "unsupported")
    for alias, engine in db_engines.items():
        if engine is None:
            # They've used a backend we don't support
            sys.stderr.write(
                (
                    "There is no South database module for your database backend '%s'. " + \
                    "Please either choose a supported database, check for " + \
                    "SOUTH_DATABASE_ADAPTER[S] settings, " + \
                    "or remove South from INSTALLED_APPS.\n"
                ) % (settings.DATABASES[alias]['ENGINE'],)
            )
            sys.exit(1)

# Now, turn that into a dict of <alias: south db module>
dbs = {}
try:
    for alias, module_name in db_engines.items():
        # The [''] fromlist makes __import__ return the leaf module
        # rather than the top-level 'south' package.
        module = __import__(module_name, {}, {}, [''])
        dbs[alias] = module.DatabaseOperations(alias)
except ImportError:
    # This error should only be triggered on 1.1 and below.
    sys.stderr.write(
        (
            "There is no South database module '%s' for your database. " + \
            "Please either choose a supported database, check for " + \
            "SOUTH_DATABASE_ADAPTER[S] settings, " + \
            "or remove South from INSTALLED_APPS.\n"
        ) % (module_name,)
    )
    sys.exit(1)

# Finally, to make old migrations work, keep 'db' around as the default database
db = dbs[DEFAULT_DB_ALIAS]

362
south/db/firebird.py Normal file
View File

@ -0,0 +1,362 @@
# firebird
from __future__ import print_function
import datetime
from django.db import connection, models
from django.core.management.color import no_style
from django.db.utils import DatabaseError
from south.db import generic
from south.utils.py3 import string_types
class DatabaseOperations(generic.DatabaseOperations):
    """
    Firebird implementation of database operations.
    """
    backend_name = 'firebird'
    alter_string_set_type = 'ALTER %(column)s TYPE %(type)s'
    alter_string_set_default = 'ALTER %(column)s SET DEFAULT %(default)s;'
    alter_string_drop_null = ''   # no DROP NOT NULL; see _alter_column_set_null
    add_column_string = 'ALTER TABLE %s ADD %s;'
    delete_column_string = 'ALTER TABLE %s DROP %s;'
    rename_table_sql = ''         # renaming tables is unsupported; see rename_table

    # Features
    allows_combined_alters = False
    has_booleans = False

    def _fill_constraint_cache(self, db_name, table_name):
        # Load this table's NOT NULL constraints from the system catalogue
        # into the per-database constraint cache.
        self._constraint_cache.setdefault(db_name, {})
        self._constraint_cache[db_name][table_name] = {}

        rows = self.execute("""
            SELECT
                rc.RDB$CONSTRAINT_NAME,
                rc.RDB$CONSTRAINT_TYPE,
                cc.RDB$TRIGGER_NAME
            FROM rdb$relation_constraints rc
            JOIN rdb$check_constraints cc
            ON rc.rdb$constraint_name = cc.rdb$constraint_name
            WHERE rc.rdb$constraint_type = 'NOT NULL'
            AND rc.rdb$relation_name = '%s'
            """ % table_name)

        for constraint, kind, column in rows:
            self._constraint_cache[db_name][table_name].setdefault(column, set())
            self._constraint_cache[db_name][table_name][column].add((kind, constraint))
        return

    def _alter_column_set_null(self, table_name, column_name, is_null):
        # Firebird lacks ALTER ... DROP NOT NULL, so toggle the nullability
        # flag directly in the RDB$RELATION_FIELDS system table instead.
        # Returns the SQL string (it is executed later by alter_column).
        sql = """
            UPDATE RDB$RELATION_FIELDS SET RDB$NULL_FLAG = %(null_flag)s
            WHERE RDB$FIELD_NAME = '%(column)s'
            AND RDB$RELATION_NAME = '%(table_name)s'
        """
        null_flag = 'NULL' if is_null else '1'
        return sql % {
            'null_flag': null_flag,
            'column': column_name.upper(),
            'table_name': table_name.upper()
        }

    def _column_has_default(self, params):
        # Returns True when the column described by params ('column',
        # 'table_name') has a database-level default value.
        sql = """
            SELECT a.RDB$DEFAULT_VALUE
            FROM RDB$RELATION_FIELDS a
            WHERE a.RDB$FIELD_NAME = '%(column)s'
            AND a.RDB$RELATION_NAME = '%(table_name)s'
        """
        value = self.execute(sql % params)
        return True if value else False

    def _alter_set_defaults(self, field, name, params, sqls):
        "Subcommand of alter_column that sets default values (overrideable)"
        # Historically, we used to set defaults here.
        # But since South 0.8, we don't ever set defaults on alter-column -- we only
        # use database-level defaults as scaffolding when adding columns.
        # However, we still sometimes need to remove defaults in alter-column.
        if self._column_has_default(params):
            sqls.append(('ALTER COLUMN %s DROP DEFAULT' % (self.quote_name(name),), []))
@generic.invalidate_table_constraints
def create_table(self, table_name, fields):
    """
    Creates the table 'table_name'. 'fields' is a list of
    (field_name, django.db.models.fields.Field) pairs.
    """
    columns = []
    autoinc_sql = ''

    for field_name, field in fields:
        # avoid default values in CREATE TABLE statements (#925)
        field._suppress_default = True

        col = self.column_sql(table_name, field_name, field)
        if not col:
            continue

        columns.append(col)
        if isinstance(field, models.AutoField):
            field_name = field.db_column or field.column
            # Firebird emulates auto-increment with a generator + trigger;
            # autoinc_sql returns both statements.
            autoinc_sql = connection.ops.autoinc_sql(table_name, field_name)

    self.execute(self.create_table_sql % {
        "table": self.quote_name(table_name),
        "columns": ', '.join([col for col in columns if col]),
    })

    if autoinc_sql:
        self.execute(autoinc_sql[0])
        self.execute(autoinc_sql[1])

def rename_table(self, old_table_name, table_name):
    """
    Renaming tables is not supported by Firebird.
    It would involve recreating all related objects (stored procedures,
    views, triggers, etc.), so this is deliberately a no-op.
    """
    pass
@generic.invalidate_table_constraints
def delete_table(self, table_name, cascade=False):
    """
    Deletes the table 'table_name'.

    Firebird will also delete any triggers associated with the table.
    Any backing sequence is dropped too, on a best-effort basis.

    @param cascade: accepted for interface compatibility, but not forwarded
        (the parent is always called with cascade=False).
        NOTE(review): presumably deliberate since Firebird cleans up
        dependent triggers itself -- confirm.
    """
    super(DatabaseOperations, self).delete_table(table_name, cascade=False)

    # Also, drop sequence if exists
    sql = connection.ops.drop_sequence_sql(table_name)
    if sql:
        try:
            self.execute(sql)
        except DatabaseError:
            # The sequence may not exist; ignore that. This used to be a
            # bare 'except:', which also swallowed KeyboardInterrupt and
            # SystemExit -- narrowed to database errors only.
            pass
def column_sql(self, table_name, field_name, field, tablespace='', with_name=True, field_prepared=False):
    """
    Creates the SQL snippet for a column. Used by add_column and add_table.

    Returns the column definition string, or None when the field has no
    database type (e.g. some GIS fields). Foreign-key and index creation
    statements are queued as deferred SQL rather than returned.
    """
    # If the field hasn't already been told its attribute name, do so.
    if not field_prepared:
        field.set_attributes_from_name(field_name)

    # hook for the field to do any resolution prior to it's attributes being queried
    if hasattr(field, 'south_init'):
        field.south_init()

    # Possible hook to fiddle with the fields (e.g. defaults & TEXT on MySQL)
    field = self._field_sanity(field)

    try:
        sql = field.db_type(connection=self._get_connection())
    except TypeError:
        # Django 1.1 and below: db_type() takes no connection argument.
        sql = field.db_type()

    if sql:
        # Some callers, like the sqlite stuff, just want the extended type.
        if with_name:
            field_output = [self.quote_name(field.column), sql]
        else:
            field_output = [sql]

        if field.primary_key:
            field_output.append('NOT NULL PRIMARY KEY')
        elif field.unique:
            # Just use UNIQUE (no indexes any more, we have delete_unique)
            field_output.append('UNIQUE')

        sql = ' '.join(field_output)
        sqlparams = ()

        # if the field is "NOT NULL" and a default value is provided, create the column with it
        # this allows the addition of a NOT NULL field to a table with existing rows
        if not getattr(field, '_suppress_default', False):
            if field.has_default():
                default = field.get_default()
                # If the default is actually None, don't add a default term
                if default is not None:
                    # If the default is a callable, then call it!
                    if callable(default):
                        default = default()
                    # Now do some very cheap quoting. TODO: Redesign return values to avoid this.
                    if isinstance(default, string_types):
                        default = "'%s'" % default.replace("'", "''")
                    elif isinstance(default, (datetime.date, datetime.time, datetime.datetime)):
                        default = "'%s'" % default
                    elif isinstance(default, bool):
                        default = int(default)
                    # Escape any % signs in the output (bug #317)
                    if isinstance(default, string_types):
                        default = default.replace("%", "%%")
                    # Add it in
                    sql += " DEFAULT %s"
                    # NOTE: (default) is a scalar, not a 1-tuple; that is fine
                    # for the single %s substituted at the end of this method.
                    sqlparams = (default)
            elif (not field.null and field.blank) or (field.get_default() == ''):
                if field.empty_strings_allowed and self._get_connection().features.interprets_empty_strings_as_nulls:
                    sql += " DEFAULT ''"
                # Error here would be nice, but doesn't seem to play fair.
                #else:
                #    raise ValueError("Attempting to add a non null column that isn't character based without an explicit default value.")

        # Firebird need set not null after of default value keyword
        if not field.primary_key and not field.null:
            sql += ' NOT NULL'

        if field.rel and self.supports_foreign_keys:
            self.add_deferred_sql(
                self.foreign_key_sql(
                    table_name,
                    field.column,
                    field.rel.to._meta.db_table,
                    field.rel.to._meta.get_field(field.rel.field_name).column
                )
            )

    # Things like the contrib.gis module fields have this in 1.1 and below
    # (kept outside 'if sql' since such fields may have no db_type at all)
    if hasattr(field, 'post_create_sql'):
        for stmt in field.post_create_sql(no_style(), table_name):
            self.add_deferred_sql(stmt)

    # Avoid double index creation (#1317)
    # Firebird creates an index implicity for each foreign key field
    # sql_indexes_for_field tries to create an index for that field too
    if not field.rel:
        # In 1.2 and above, you have to ask the DatabaseCreation stuff for it.
        # This also creates normal indexes in 1.1.
        if hasattr(self._get_connection().creation, "sql_indexes_for_field"):
            # Make a fake model to pass in, with only db_table
            model = self.mock_model("FakeModelForGISCreation", table_name)
            for stmt in self._get_connection().creation.sql_indexes_for_field(model, field, no_style()):
                self.add_deferred_sql(stmt)

    if sql:
        return sql % sqlparams
    else:
        return None

def _drop_constraints(self, table_name, name, field):
    # Drop CHECK constraints, reconcile the UNIQUE constraint with the new
    # field definition, and drop any foreign keys on the column. Used by
    # alter_column before the type change.
    if self.has_check_constraints:
        check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
        for constraint in check_constraints:
            self.execute(self.delete_check_sql % {
                'table': self.quote_name(table_name),
                'constraint': self.quote_name(constraint),
            })

    # Drop or add UNIQUE constraint
    unique_constraint = list(self._constraints_affecting_columns(table_name, [name], "UNIQUE"))
    if field.unique and not unique_constraint:
        self.create_unique(table_name, [name])
    elif not field.unique and unique_constraint:
        self.delete_unique(table_name, [name])

    # Drop all foreign key constraints
    try:
        self.delete_foreign_key(table_name, name)
    except ValueError:
        # There weren't any
        pass
@generic.invalidate_table_constraints
def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
    """
    Alters the given column name so it will match the given field.
    Note that conversion between the two by the database must be possible.
    Will not automatically add _id by default; to have this behavour, pass
    explicit_name=False.

    @param table_name: The name of the table to add the column to
    @param name: The name of the column to alter
    @param field: The new field definition to use
    """
    if self.dry_run:
        if self.debug:
            print(' - no dry run output for alter_column() due to dynamic DDL, sorry')
        return

    # hook for the field to do any resolution prior to it's attributes being queried
    if hasattr(field, 'south_init'):
        field.south_init()

    # Add _id or whatever if we need to
    field.set_attributes_from_name(name)
    if not explicit_name:
        name = field.column
    else:
        field.column = name

    if not ignore_constraints:
        # Drop all check constraints. Note that constraints will be added back
        # with self.alter_string_set_type and self.alter_string_drop_null.
        self._drop_constraints(table_name, name, field)

    # First, change the type
    params = {
        "column": self.quote_name(name),
        "type": self._db_type_for_alter_column(field),
        "table_name": table_name
    }

    # SQLs is a list of (SQL, values) pairs.
    sqls = []
    sqls_extra = []

    # Only alter the column if it has a type (Geometry ones sometimes don't)
    if params["type"] is not None:
        sqls.append((self.alter_string_set_type % params, []))

    # Add any field- and backend- specific modifications
    self._alter_add_column_mods(field, name, params, sqls)

    # Next, nullity: modified, firebird doesn't support DROP NOT NULL
    # (nullability is toggled via a system-table UPDATE, run separately below)
    sqls_extra.append(self._alter_column_set_null(table_name, name, field.null))

    # Next, set any default
    self._alter_set_defaults(field, name, params, sqls)

    # Finally, actually change the column
    if self.allows_combined_alters:
        sqls, values = list(zip(*sqls))
        self.execute(
            "ALTER TABLE %s %s;" % (self.quote_name(table_name), ", ".join(sqls)),
            generic.flatten(values),
        )
    else:
        # Databases like e.g. MySQL don't like more than one alter at once.
        for sql, values in sqls:
            try:
                self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), sql), values)
            except DatabaseError as e:
                print(e)

    # Execute extra sql, which don't need ALTER TABLE statement
    for sql in sqls_extra:
        self.execute(sql)

    if not ignore_constraints:
        # Add back FK constraints if needed
        if field.rel and self.supports_foreign_keys:
            self.execute(
                self.foreign_key_sql(
                    table_name,
                    field.column,
                    field.rel.to._meta.db_table,
                    field.rel.to._meta.get_field(field.rel.field_name).column
                )
            )

@generic.copy_column_constraints
@generic.delete_column_constraints
def rename_column(self, table_name, old, new):
    "Renames the column 'old' on the table to 'new' (no-op when identical)."
    if old == new:
        # Short-circuit out
        return []
    self.execute('ALTER TABLE %s ALTER %s TO %s;' % (
        self.quote_name(table_name),
        self.quote_name(old),
        self.quote_name(new),
    ))

1164
south/db/generic.py Normal file

File diff suppressed because it is too large Load Diff

290
south/db/mysql.py Normal file
View File

@ -0,0 +1,290 @@
# MySQL-specific implementations for south
# Original author: Andrew Godwin
# Patches by: F. Gabriel Gosselin <gabrielNOSPAM@evidens.ca>
from south.db import generic
from south.db.generic import DryRunError, INVALID
from south.logger import get_logger
def delete_column_constraints(func):
    """
    Decorates column operation functions for MySQL.

    Deletes the constraints from the database and clears local cache.
    """
    def _column_rm(self, table_name, column_name, *args, **opts):
        # Drop the column's own foreign key constraint, if it has one.
        try:
            self.delete_foreign_key(table_name, column_name)
        except ValueError:
            # No foreign key on the column; nothing to drop.
            pass
        # Drop every constraint in other tables that refers to this column.
        try:
            for cname, ref_table, ref_column in self._lookup_reverse_constraint(table_name, column_name):
                self.delete_foreign_key(ref_table, ref_column)
        except DryRunError:
            pass
        return func(self, table_name, column_name, *args, **opts)
    return _column_rm
def copy_column_constraints(func):
    """
    Decorates column operation functions for MySQL.

    Determines existing constraints and copies them to a new column.
    """
    def _column_cp(self, table_name, column_old, column_new, *args, **opts):
        # Re-create the column's own foreign key (if any) against the new name.
        try:
            first_constraint = self._find_foreign_constraints(table_name, column_old)[0]
            referenced = self._lookup_constraint_references(table_name, first_constraint)
            if referenced is not None:
                foreign_table, foreign_column = referenced
                if foreign_table and foreign_column:
                    sql = self.foreign_key_sql(table_name, column_new, foreign_table, foreign_column)
                    get_logger().debug("Foreign key SQL: " + sql)
                    self.add_deferred_sql(sql)
        except IndexError:
            # No constraint exists on the column, so there is nothing to copy.
            pass
        except DryRunError:
            pass
        # Re-point constraints in other tables that referred to the old column.
        try:
            for cname, ref_table, ref_column in self._lookup_reverse_constraint(table_name, column_old):
                self.add_deferred_sql(self.foreign_key_sql(ref_table, ref_column, table_name, column_new))
        except DryRunError:
            pass
        return func(self, table_name, column_old, column_new, *args, **opts)
    return _column_cp
def invalidate_table_constraints(func):
    """
    For MySQL we grab all table constraints simultaneously, so this is
    effective.
    It further solves the issues of invalidating referred table constraints.
    """
    def _cache_clear(self, table, *args, **opts):
        name = self._get_setting('NAME')
        # Constraints are cached per whole database, so drop every cache
        # entry for this database at once.
        for cache in (self._constraint_cache, self._reverse_cache, self._constraint_references):
            if name in cache:
                del cache[name]
        return func(self, table, *args, **opts)
    return _cache_clear
class DatabaseOperations(generic.DatabaseOperations):
    """
    MySQL implementation of database operations.

    MySQL has no DDL transaction support This can confuse people when they ask
    how to roll back - hence the dry runs, etc., found in the migration code.
    """

    backend_name = "mysql"
    alter_string_set_type = ''
    alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;'
    alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;'
    drop_index_string = 'DROP INDEX %(index_name)s ON %(table_name)s'
    delete_primary_key_sql = "ALTER TABLE %(table)s DROP PRIMARY KEY"
    delete_foreign_key_sql = "ALTER TABLE %(table)s DROP FOREIGN KEY %(constraint)s"
    delete_unique_sql = "ALTER TABLE %s DROP INDEX %s"
    rename_table_sql = "RENAME TABLE %s TO %s;"

    allows_combined_alters = False
    has_check_constraints = False
    raises_default_errors = False

    geom_types = ['geometry', 'point', 'linestring', 'polygon']
    text_types = ['text', 'blob']

    def __init__(self, db_alias):
        # Per-database caches of constraint metadata, filled lazily by
        # _fill_constraint_cache.
        self._constraint_references = {}
        self._reverse_cache = {}
        super(DatabaseOperations, self).__init__(db_alias)
        if self._has_setting('STORAGE_ENGINE') and self._get_setting('STORAGE_ENGINE'):
            self.create_table_sql = self.create_table_sql + ' ENGINE=%s' % self._get_setting('STORAGE_ENGINE')

    def _is_valid_cache(self, db_name, table_name):
        cache = self._constraint_cache
        # we cache the whole db so if there are any tables table_name is valid
        # (unless that table's entry was explicitly marked INVALID)
        return db_name in cache and cache[db_name].get(table_name, None) is not INVALID

    def _fill_constraint_cache(self, db_name, table_name):
        # for MySQL grab all constraints for this database. It's just as cheap as a single column.
        self._constraint_cache[db_name] = {}
        self._constraint_cache[db_name][table_name] = {}
        self._reverse_cache[db_name] = {}
        self._constraint_references[db_name] = {}

        # First pass: which columns each (table, constraint) touches,
        # and what they reference.
        name_query = """
            SELECT kc.`constraint_name`, kc.`column_name`, kc.`table_name`,
                kc.`referenced_table_name`, kc.`referenced_column_name`
            FROM information_schema.key_column_usage AS kc
            WHERE
                kc.table_schema = %s
        """
        rows = self.execute(name_query, [db_name])
        if not rows:
            return

        cnames = {}
        for constraint, column, table, ref_table, ref_column in rows:
            key = (table, constraint)
            cnames.setdefault(key, set())
            cnames[key].add((column, ref_table, ref_column))

        # Second pass: the kind of each constraint (FOREIGN KEY, UNIQUE, ...).
        type_query = """
            SELECT c.constraint_name, c.table_name, c.constraint_type
            FROM information_schema.table_constraints AS c
            WHERE
                c.table_schema = %s
        """
        rows = self.execute(type_query, [db_name])
        for constraint, table, kind in rows:
            key = (table, constraint)
            self._constraint_cache[db_name].setdefault(table, {})
            try:
                cols = cnames[key]
            except KeyError:
                cols = set()
            for column_set in cols:
                (column, ref_table, ref_column) = column_set
                self._constraint_cache[db_name][table].setdefault(column, set())
                if kind == 'FOREIGN KEY':
                    self._constraint_cache[db_name][table][column].add((kind,
                        constraint))
                    # Create constraint lookup, see constraint_references
                    self._constraint_references[db_name][(table,
                        constraint)] = (ref_table, ref_column)
                    # Create reverse table lookup, reverse_lookup
                    self._reverse_cache[db_name].setdefault(ref_table, {})
                    self._reverse_cache[db_name][ref_table].setdefault(ref_column,
                        set())
                    self._reverse_cache[db_name][ref_table][ref_column].add(
                        (constraint, table, column))
                else:
                    self._constraint_cache[db_name][table][column].add((kind,
                        constraint))

    def connection_init(self):
        """
        Run before any SQL to let database-specific config be sent as a command,
        e.g. which storage engine (MySQL) or transaction serialisability level.
        """
        cursor = self._get_connection().cursor()
        # MySQL 5.5+ renamed 'storage_engine' to 'default_storage_engine';
        # probe which variable this server understands.
        if cursor.execute("SHOW variables WHERE Variable_Name='default_storage_engine';"):
            engine_var = 'default_storage_engine'
        else:
            engine_var = 'storage_engine'
        if self._has_setting('STORAGE_ENGINE') and self._get_setting('STORAGE_ENGINE'):
            cursor.execute("SET %s=%s;" % (engine_var, self._get_setting('STORAGE_ENGINE')))
    def start_transaction(self):
        # Begin the migration transaction, then disable FK checks so schema
        # changes can be applied in any order without constraint errors.
        super(DatabaseOperations, self).start_transaction()
        self.execute("SET FOREIGN_KEY_CHECKS=0;")
    @copy_column_constraints
    @delete_column_constraints
    @invalidate_table_constraints
    def rename_column(self, table_name, old, new):
        """
        Rename column ``old`` to ``new`` on ``table_name``.

        MySQL's CHANGE COLUMN requires restating the full column definition,
        so it is read back from DESCRIBE first.

        Raises ValueError if the column does not exist.
        """
        if old == new or self.dry_run:
            return []
        # DESCRIBE row layout: (Field, Type, Null, Key, Default, Extra)
        rows = [x for x in self.execute('DESCRIBE %s' % (self.quote_name(table_name),)) if x[0] == old]
        if not rows:
            raise ValueError("No column '%s' in '%s'." % (old, table_name))
        params = (
            self.quote_name(table_name),
            self.quote_name(old),
            self.quote_name(new),
            rows[0][1],                                    # column type
            rows[0][2] == "YES" and "NULL" or "NOT NULL",  # nullability
            rows[0][4] and "DEFAULT " or "",               # DEFAULT keyword only if a default exists
            rows[0][4] and "%s" or "",                     # placeholder for the default value
            rows[0][5] or "",                              # Extra, e.g. auto_increment
        )
        sql = 'ALTER TABLE %s CHANGE COLUMN %s %s %s %s %s %s %s;' % params
        if rows[0][4]:
            # Pass the default as a bound parameter so it is escaped safely.
            self.execute(sql, (rows[0][4],))
        else:
            self.execute(sql)
    @delete_column_constraints
    def delete_column(self, table_name, name):
        # Plain generic DROP COLUMN works on MySQL; the decorator removes the
        # column's entries from the constraint cache afterwards.
        super(DatabaseOperations, self).delete_column(table_name, name)
    @invalidate_table_constraints
    def rename_table(self, old_table_name, table_name):
        # Generic RENAME TABLE works; cached constraints for the old name are
        # invalidated by the decorator.
        super(DatabaseOperations, self).rename_table(old_table_name,
                                                     table_name)
    @invalidate_table_constraints
    def delete_table(self, table_name):
        # Generic DROP TABLE works; the decorator drops cached constraint info.
        super(DatabaseOperations, self).delete_table(table_name)
def _lookup_constraint_references(self, table_name, cname):
"""
Provided an existing table and constraint, returns tuple of (foreign
table, column)
"""
db_name = self._get_setting('NAME')
try:
return self._constraint_references[db_name][(table_name, cname)]
except KeyError:
return None
def _lookup_reverse_constraint(self, table_name, column_name=None):
"""Look for the column referenced by a foreign constraint"""
db_name = self._get_setting('NAME')
if self.dry_run:
raise DryRunError("Cannot get constraints for columns.")
if not self._is_valid_cache(db_name, table_name):
# Piggy-back on lookup_constraint, ensures cache exists
self.lookup_constraint(db_name, table_name)
try:
table = self._reverse_cache[db_name][table_name]
if column_name == None:
return [(y, tuple(y)) for x, y in table.items()]
else:
return tuple(table[column_name])
except KeyError:
return []
def _field_sanity(self, field):
"""
This particular override stops us sending DEFAULTs for BLOB/TEXT columns.
"""
# MySQL does not support defaults for geometry columns also
type = self._db_type_for_alter_column(field).lower()
is_geom = True in [type.find(t) > -1 for t in self.geom_types]
is_text = True in [type.find(t) > -1 for t in self.text_types]
if is_geom or is_text:
field._suppress_default = True
return field
def _alter_set_defaults(self, field, name, params, sqls):
"""
MySQL does not support defaults on text or blob columns.
"""
type = params['type']
# MySQL does not support defaults for geometry columns also
is_geom = True in [type.find(t) > -1 for t in self.geom_types]
is_text = True in [type.find(t) > -1 for t in self.text_types]
if not is_geom and not is_text:
super(DatabaseOperations, self)._alter_set_defaults(field, name, params, sqls)

345
south/db/oracle.py Normal file
View File

@ -0,0 +1,345 @@
from __future__ import print_function
import os.path
import sys
import re
import warnings
import cx_Oracle
from django.db import connection, models
from django.db.backends.util import truncate_name
from django.core.management.color import no_style
from django.db.models.fields import NOT_PROVIDED
from django.db.utils import DatabaseError
# In revision r16016 function get_sequence_name has been transformed into
# method of DatabaseOperations class. To make code backward-compatible we
# need to handle both situations.
try:
from django.db.backends.oracle.base import get_sequence_name\
as original_get_sequence_name
except ImportError:
original_get_sequence_name = None
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
    """
    Oracle implementation of database operations.
    """
    backend_name = 'oracle'
    # DDL templates (Oracle MODIFY dialect).
    alter_string_set_type = 'ALTER TABLE %(table_name)s MODIFY %(column)s %(type)s %(nullity)s;'
    alter_string_set_default = 'ALTER TABLE %(table_name)s MODIFY %(column)s DEFAULT %(default)s;'
    alter_string_update_nulls_to_default = \
        'UPDATE %(table_name)s SET %(column)s = %(default)s WHERE %(column)s IS NULL;'
    add_column_string = 'ALTER TABLE %s ADD %s;'
    delete_column_string = 'ALTER TABLE %s DROP COLUMN %s;'
    add_constraint_string = 'ALTER TABLE %(table_name)s ADD CONSTRAINT %(constraint)s %(clause)s'
    # Oracle cannot combine several column alterations into one ALTER TABLE.
    allows_combined_alters = False
    # Booleans are emulated with NUMBER(1) + CHECK.
    has_booleans = False
    # user_constraints.constraint_type codes -> South constraint kind names.
    constraints_dict = {
        'P': 'PRIMARY KEY',
        'U': 'UNIQUE',
        'C': 'CHECK',
        'R': 'FOREIGN KEY'
    }
def get_sequence_name(self, table_name):
if original_get_sequence_name is None:
return self._get_connection().ops._get_sequence_name(table_name)
else:
return original_get_sequence_name(table_name)
    #TODO: This will cause very obscure bugs if anyone uses a column name or string value
    #      that looks like a column definition (with 'CHECK', 'DEFAULT' and/or 'NULL' in it)
    #      e.g. "CHECK MATE" varchar(10) DEFAULT 'NULL'
    def adj_column_sql(self, col):
        """
        Rewrite a generic column-definition fragment into Oracle's required
        clause order (DEFAULT must precede CHECK and NULL/NOT NULL).
        """
        # Syntax fixes -- Oracle is picky about clause order
        # Move an integer DEFAULT in front of an inline CHECK constraint.
        col = re.sub('(?P<constr>CHECK \(.*\))(?P<any>.*)(?P<default>DEFAULT \d+)',
                     lambda mo: '%s %s%s'%(mo.group('default'), mo.group('constr'), mo.group('any')), col) #syntax fix for boolean/integer field only
        # Move DEFAULT in front of NULL/NOT NULL.
        col = re.sub('(?P<not_null>(NOT )?NULL) (?P<misc>(.* )?)(?P<default>DEFAULT.+)',
                     lambda mo: '%s %s %s'%(mo.group('default'),mo.group('not_null'),mo.group('misc') or ''), col) #fix order of NULL/NOT NULL and DEFAULT
        return col
    def check_meta(self, table_name):
        # True when the table belongs to a currently-registered Django model.
        return table_name in [ m._meta.db_table for m in models.get_models() ] #caching provided by Django
def normalize_name(self, name):
"""
Get the properly shortened and uppercased identifier as returned by quote_name(), but without the actual quotes.
"""
nn = self.quote_name(name)
if nn[0] == '"' and nn[-1] == '"':
nn = nn[1:-1]
return nn
@generic.invalidate_table_constraints
def create_table(self, table_name, fields):
qn = self.quote_name(table_name)
columns = []
autoinc_sql = ''
for field_name, field in fields:
field = self._field_sanity(field)
# avoid default values in CREATE TABLE statements (#925)
field._suppress_default = True
col = self.column_sql(table_name, field_name, field)
if not col:
continue
col = self.adj_column_sql(col)
columns.append(col)
if isinstance(field, models.AutoField):
autoinc_sql = connection.ops.autoinc_sql(table_name, field_name)
sql = 'CREATE TABLE %s (%s);' % (qn, ', '.join([col for col in columns]))
self.execute(sql)
if autoinc_sql:
self.execute(autoinc_sql[0])
self.execute(autoinc_sql[1])
    @generic.invalidate_table_constraints
    def delete_table(self, table_name, cascade=True):
        """
        Drop ``table_name`` (CASCADE CONSTRAINTS by default) and any
        auto-created sequence backing an AutoField.
        """
        qn = self.quote_name(table_name)
        # Note: PURGE is not valid syntax for Oracle 9i (it was added in 10)
        if cascade:
            self.execute('DROP TABLE %s CASCADE CONSTRAINTS;' % qn)
        else:
            self.execute('DROP TABLE %s;' % qn)
        # If the table has an AutoField a sequence was created.
        # The PL/SQL block drops the sequence only if it exists, so this is
        # safe for tables without an AutoField.
        sequence_sql = """
DECLARE
    i INTEGER;
BEGIN
    SELECT COUNT(*) INTO i FROM USER_CATALOG
        WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE';
    IF i = 1 THEN
        EXECUTE IMMEDIATE 'DROP SEQUENCE "%(sq_name)s"';
    END IF;
END;
/""" % {'sq_name': self.get_sequence_name(table_name)}
        self.execute(sequence_sql)
    @generic.invalidate_table_constraints
    def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
        """
        Alter column ``name`` on ``table_name`` to match ``field``.

        Emits MODIFY statements for type/nullity and default, optionally
        dropping and re-adding CHECK/FK constraints around them, and works
        around Oracle quirks: ORA-01442/01451 (column already NULL/NOT NULL)
        and ORA-22858/22859 (regular column <-> LOB conversion).
        """
        if self.dry_run:
            if self.debug:
                print(' - no dry run output for alter_column() due to dynamic DDL, sorry')
            return
        qn = self.quote_name(table_name)
        # hook for the field to do any resolution prior to it's attributes being queried
        if hasattr(field, 'south_init'):
            field.south_init()
        field = self._field_sanity(field)
        # Add _id or whatever if we need to
        field.set_attributes_from_name(name)
        if not explicit_name:
            name = field.column
        qn_col = self.quote_name(name)
        # First, change the type
        # This will actually also add any CHECK constraints needed,
        # since e.g. 'type' for a BooleanField is 'NUMBER(1) CHECK (%(qn_column)s IN (0,1))'
        params = {
            'table_name':qn,
            'column': qn_col,
            'type': self._db_type_for_alter_column(field),
            'nullity': 'NOT NULL',
            'default': 'NULL'
        }
        if field.null:
            params['nullity'] = 'NULL'
        # Each entry: (SQL template, params dict, bound query args).
        sql_templates = [
            (self.alter_string_set_type, params, []),
            (self.alter_string_set_default, params, []),
        ]
        if not field.null and field.has_default():
            # Use default for rows that had nulls. To support the case where
            # the new default does not fit the old type, we need to first change
            # the column type to the new type, but null=True; then set the default;
            # then complete the type change.
            def change_params(**kw):
                "A little helper for non-destructively changing the params"
                p = params.copy()
                p.update(kw)
                return p
            sql_templates[:0] = [
                (self.alter_string_set_type, change_params(nullity='NULL'),[]),
                (self.alter_string_update_nulls_to_default, change_params(default="%s"), [field.get_default()]),
            ]
        if not ignore_constraints:
            # drop CHECK constraints. Make sure this is executed before the ALTER TABLE statements
            # generated above, since those statements recreate the constraints we delete here.
            check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
            for constraint in check_constraints:
                self.execute(self.delete_check_sql % {
                    'table': self.quote_name(table_name),
                    'constraint': self.quote_name(constraint),
                })
            # Drop foreign constraints
            try:
                self.delete_foreign_key(qn, qn_col)
            except ValueError:
                # There weren't any
                pass
        # NOTE: the loop variable deliberately rebinds ``params`` to the dict
        # stored with each template.
        for sql_template, params, args in sql_templates:
            try:
                self.execute(sql_template % params, args, print_all_errors=False)
            except DatabaseError as exc:
                description = str(exc)
                # Oracle complains if a column is already NULL/NOT NULL
                if 'ORA-01442' in description or 'ORA-01451' in description:
                    # so we just drop NULL/NOT NULL part from target sql and retry
                    params['nullity'] = ''
                    sql = sql_template % params
                    self.execute(sql)
                # Oracle also has issues if we try to change a regular column
                # to a LOB or vice versa (also REF, object, VARRAY or nested
                # table, but these don't come up much in Django apps)
                elif 'ORA-22858' in description or 'ORA-22859' in description:
                    self._alter_column_lob_workaround(table_name, name, field)
                else:
                    self._print_sql_error(exc, sql_template % params)
                    raise
        if not ignore_constraints:
            # Add back FK constraints if needed
            if field.rel: #and self.supports_foreign_keys:
                self.add_deferred_sql(
                    self.foreign_key_sql(
                        qn[1:-1], # foreign_key_sql uses this as part of constraint name
                        qn_col[1:-1], # foreign_key_sql uses this as part of constraint name
                        field.rel.to._meta.db_table,
                        field.rel.to._meta.get_field(field.rel.field_name).column
                    )
                )
    def _alter_column_lob_workaround(self, table_name, name, field):
        """
        Oracle refuses to change a column type from/to LOB to/from a regular
        column. In Django, this shows up when the field is changed from/to
        a TextField.
        What we need to do instead is:
        - Rename the original column
        - Add the desired field as new
        - Update the table to transfer values from old to new
        - Drop old column
        """
        renamed = self._generate_temp_name(name)
        self.rename_column(table_name, name, renamed)
        # keep_default=False: the new column must not carry a scaffold default.
        self.add_column(table_name, name, field, keep_default=False)
        # Copy the data across before dropping the renamed original.
        self.execute("UPDATE %s set %s=%s" % (
            self.quote_name(table_name),
            self.quote_name(name),
            self.quote_name(renamed),
        ))
        self.delete_column(table_name, renamed)
def _generate_temp_name(self, for_name):
suffix = hex(hash(for_name)).upper()[1:]
return self.normalize_name(for_name + "_" + suffix)
    @generic.copy_column_constraints #TODO: Appears to be nulled by the delete decorator below...
    @generic.delete_column_constraints
    def rename_column(self, table_name, old, new):
        # Rename ``old`` to ``new``; no-op (empty list) when the names match.
        if old == new:
            # Short-circuit out
            return []
        self.execute('ALTER TABLE %s RENAME COLUMN %s TO %s;' % (
            self.quote_name(table_name),
            self.quote_name(old),
            self.quote_name(new),
        ))
    @generic.invalidate_table_constraints
    def add_column(self, table_name, name, field, keep_default=False):
        """
        Add a column for ``field`` to ``table_name``.

        NOTE(review): ``keep_default`` is accepted but never consulted -- the
        default is always dropped again after the ADD. This looks deliberate
        (defaults used only as scaffolding), but confirm against the generic
        backend before relying on it.
        """
        field = self._field_sanity(field)
        sql = self.column_sql(table_name, name, field)
        sql = self.adj_column_sql(sql)
        if sql:
            params = (
                self.quote_name(table_name),
                sql
            )
            sql = self.add_column_string % params
            self.execute(sql)
            # Now, drop the default if we need to
            if field.default is not None:
                field.default = NOT_PROVIDED
                self.alter_column(table_name, name, field, explicit_name=False, ignore_constraints=True)
    def delete_column(self, table_name, name):
        # NOTE(review): passes the *quoted* table name down to the generic
        # implementation -- apparently required by Oracle; confirm the generic
        # backend does not quote again.
        return super(DatabaseOperations, self).delete_column(self.quote_name(table_name), name)
    def lookup_constraint(self, db_name, table_name, column_name=None):
        """Look up constraints, normalizing the column name first."""
        if column_name:
            # Column names in the constraint cache come from the database,
            # make sure we use the properly shortened/uppercased version
            # for lookup.
            column_name = self.normalize_name(column_name)
        return super(DatabaseOperations, self).lookup_constraint(db_name, table_name, column_name)
def _constraints_affecting_columns(self, table_name, columns, type="UNIQUE"):
if columns:
columns = [self.normalize_name(c) for c in columns]
return super(DatabaseOperations, self)._constraints_affecting_columns(table_name, columns, type)
    def _field_sanity(self, field):
        """
        This particular override stops us sending DEFAULTs for BooleanField.
        """
        if isinstance(field, models.BooleanField) and field.has_default():
            # Oracle stores booleans as NUMBER(1); coerce the default to 0/1.
            field.default = int(field.to_python(field.get_default()))
        # On Oracle, empty strings are null
        if isinstance(field, (models.CharField, models.TextField)):
            field.null = field.empty_strings_allowed
        return field
def _default_value_workaround(self, value):
from datetime import date,time,datetime
if isinstance(value, (date,time,datetime)):
return "'%s'" % value
else:
return super(DatabaseOperations, self)._default_value_workaround(value)
    def _fill_constraint_cache(self, db_name, table_name):
        """
        Cache (kind, constraint_name) pairs per column of ``table_name``
        from Oracle's user_constraints/user_cons_columns views.
        """
        self._constraint_cache.setdefault(db_name, {})
        self._constraint_cache[db_name][table_name] = {}
        # NOTE(review): the table name is interpolated directly into the SQL;
        # acceptable only because normalize_name() output comes from schema
        # identifiers, not user input.
        rows = self.execute("""
            SELECT user_cons_columns.constraint_name,
                   user_cons_columns.column_name,
                   user_constraints.constraint_type
            FROM user_constraints
            JOIN user_cons_columns ON
                 user_constraints.table_name = user_cons_columns.table_name AND
                 user_constraints.constraint_name = user_cons_columns.constraint_name
            WHERE user_constraints.table_name = '%s'
        """ % self.normalize_name(table_name))
        for constraint, column, kind in rows:
            self._constraint_cache[db_name][table_name].setdefault(column, set())
            # Translate Oracle's one-letter type codes via constraints_dict.
            self._constraint_cache[db_name][table_name][column].add((self.constraints_dict[kind], constraint))
        return

View File

@ -0,0 +1,96 @@
from __future__ import print_function
import uuid
from django.db.backends.util import truncate_name
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
    """
    PsycoPG2 implementation of database operations.
    """
    backend_name = "postgres"

    def create_index_name(self, table_name, column_names, suffix=""):
        """
        Generate a unique name for the index

        Django's logic for naming field indexes is different in the
        postgresql_psycopg2 backend, so we follow that for single-column
        indexes.
        """
        if len(column_names) == 1:
            return truncate_name(
                '%s_%s%s' % (table_name, column_names[0], suffix),
                self._get_connection().ops.max_name_length()
            )
        return super(DatabaseOperations, self).create_index_name(table_name, column_names, suffix)

    @generic.copy_column_constraints
    @generic.delete_column_constraints
    def rename_column(self, table_name, old, new):
        """Rename column ``old`` to ``new`` (no-op list when names match)."""
        if old == new:
            # Short-circuit out
            return []
        self.execute('ALTER TABLE %s RENAME COLUMN %s TO %s;' % (
            self.quote_name(table_name),
            self.quote_name(old),
            self.quote_name(new),
        ))

    @generic.invalidate_table_constraints
    def rename_table(self, old_table_name, table_name):
        "will rename the table and an associated ID sequence and primary key index"
        # First, rename the table
        generic.DatabaseOperations.rename_table(self, old_table_name, table_name)
        # Then, try renaming the ID sequence
        # (if you're using other AutoFields... your problem, unfortunately)
        if self.execute(
            """
            SELECT 1
            FROM information_schema.sequences
            WHERE sequence_name = %s
            """,
            [old_table_name + '_id_seq']
        ):
            generic.DatabaseOperations.rename_table(self, old_table_name + "_id_seq", table_name + "_id_seq")

        # Rename primary key index, will not rename other indices on
        # the table that are used by django (e.g. foreign keys). Until
        # figure out how, you need to do this yourself.
        # BUGFIX: execute() returns rows as 1-tuples, so the old membership
        # test (a string against a list of tuples) could never match and the
        # pkey index was never renamed; flatten the first column first.
        pkey_index_names = [row[0] for row in self.execute(
            """
            SELECT pg_index.indexrelid::regclass
            FROM pg_index, pg_attribute
            WHERE
              indrelid = %s::regclass AND
              pg_attribute.attrelid = indrelid AND
              pg_attribute.attnum = any(pg_index.indkey)
              AND indisprimary
            """,
            [table_name]
        )]
        if old_table_name + "_pkey" in pkey_index_names:
            generic.DatabaseOperations.rename_table(self, old_table_name + "_pkey", table_name + "_pkey")

    def rename_index(self, old_index_name, index_name):
        "Rename an index individually"
        # ALTER TABLE ... RENAME works for indexes too on PostgreSQL.
        generic.DatabaseOperations.rename_table(self, old_index_name, index_name)

    def _default_value_workaround(self, value):
        "Support for UUIDs on psql"
        if isinstance(value, uuid.UUID):
            return str(value)
        else:
            return super(DatabaseOperations, self)._default_value_workaround(value)

    def _db_type_for_alter_column(self, field):
        # Positive* fields: use the plain type; the CHECK is handled separately.
        return self._db_positive_type_for_alter_column(DatabaseOperations, field)

    def _alter_add_column_mods(self, field, name, params, sqls):
        # Re-add the positive-value CHECK constraint for Positive* fields.
        return self._alter_add_positive_check(DatabaseOperations, field, name, params, sqls)

View File

View File

@ -0,0 +1,444 @@
from datetime import date, datetime, time
from warnings import warn
from django.db import models
from django.db.models import fields
from south.db import generic
from south.db.generic import delete_column_constraints, invalidate_table_constraints, copy_column_constraints
from south.exceptions import ConstraintDropped
from south.utils.py3 import string_types
try:
from django.utils.encoding import smart_text # Django >= 1.5
except ImportError:
from django.utils.encoding import smart_unicode as smart_text # Django < 1.5
from django.core.management.color import no_style
class DatabaseOperations(generic.DatabaseOperations):
    """
    django-pyodbc (sql_server.pyodbc) implementation of database operations.
    """
    backend_name = "pyodbc"
    # DDL templates (T-SQL dialect).
    add_column_string = 'ALTER TABLE %s ADD %s;'
    alter_string_set_type = 'ALTER COLUMN %(column)s %(type)s'
    alter_string_set_null = 'ALTER COLUMN %(column)s %(type)s NULL'
    alter_string_drop_null = 'ALTER COLUMN %(column)s %(type)s NOT NULL'
    # MSSQL cannot combine several column alterations into one statement.
    allows_combined_alters = False
    drop_index_string = 'DROP INDEX %(index_name)s ON %(table_name)s'
    drop_constraint_string = 'ALTER TABLE %(table_name)s DROP CONSTRAINT %(constraint_name)s'
    delete_column_string = 'ALTER TABLE %s DROP COLUMN %s'
    #create_check_constraint_sql = "ALTER TABLE %(table)s " + \
    #                              generic.DatabaseOperations.add_check_constraint_fragment
    create_foreign_key_sql = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s " + \
                             "FOREIGN KEY (%(column)s) REFERENCES %(target)s"
    create_unique_sql = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s UNIQUE (%(columns)s)"
    default_schema_name = "dbo"
    # Booleans are emulated (bit column), no native BOOLEAN type.
    has_booleans = False
    @delete_column_constraints
    def delete_column(self, table_name, name):
        """
        Drop a column; MSSQL requires dropping its constraints, indexes and
        DEFAULT first or the DROP COLUMN fails.
        """
        q_table_name, q_name = (self.quote_name(table_name), self.quote_name(name))
        # Zap the constraints
        for const in self._find_constraints_for_column(table_name,name):
            params = {'table_name':q_table_name, 'constraint_name': const}
            sql = self.drop_constraint_string % params
            self.execute(sql, [])
        # Zap the indexes
        for ind in self._find_indexes_for_column(table_name,name):
            params = {'table_name':q_table_name, 'index_name': ind}
            sql = self.drop_index_string % params
            self.execute(sql, [])
        # Zap default if exists
        drop_default = self.drop_column_default_sql(table_name, name)
        if drop_default:
            # NOTE(review): uses [%s] bracket-quoting here rather than
            # quote_name() as above -- inconsistent but equivalent on MSSQL.
            sql = "ALTER TABLE [%s] %s" % (table_name, drop_default)
            self.execute(sql, [])
        # Finally zap the column itself
        self.execute(self.delete_column_string % (q_table_name, q_name), [])
    def _find_indexes_for_column(self, table_name, name):
        "Find the indexes that apply to a column, needed when deleting"
        # NOTE(review): table/column are interpolated into the SQL string;
        # only safe because both come from schema identifiers.
        sql = """
        SELECT si.name, si.id, sik.colid, sc.name
        FROM dbo.sysindexes si WITH (NOLOCK)
        INNER JOIN dbo.sysindexkeys sik WITH (NOLOCK)
            ON  sik.id = si.id
            AND sik.indid = si.indid
        INNER JOIN dbo.syscolumns sc WITH (NOLOCK)
            ON  si.id = sc.id
            AND sik.colid = sc.colid
        WHERE si.indid !=0
            AND si.id = OBJECT_ID('%s')
            AND sc.name = '%s'
        """
        idx = self.execute(sql % (table_name, name), [])
        # Only the index name (first column) is needed by callers.
        return [i[0] for i in idx]
    def _find_constraints_for_column(self, table_name, name, just_names=True):
        """
        Find the constraints that apply to a column, needed when deleting. Defaults not included.
        This is more general than the parent _constraints_affecting_columns, as on MSSQL this
        includes PK and FK constraints.

        With just_names=True returns a list of constraint names; otherwise a
        dict mapping name -> (type, details), where details is a column list
        for PK/UNIQUE, the check clause for CHECK, or the referenced
        (schema, table, column) for FOREIGN KEY.
        """
        sql = """
        SELECT CC.[CONSTRAINT_NAME]
              ,TC.[CONSTRAINT_TYPE]
              ,CHK.[CHECK_CLAUSE]
              ,RFD.TABLE_SCHEMA
              ,RFD.TABLE_NAME
              ,RFD.COLUMN_NAME
              -- used for normalized names
              ,CC.TABLE_NAME
              ,CC.COLUMN_NAME
        FROM [INFORMATION_SCHEMA].[TABLE_CONSTRAINTS] TC
        JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE CC
            ON TC.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG
            AND TC.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA
            AND TC.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
        LEFT JOIN INFORMATION_SCHEMA.CHECK_CONSTRAINTS CHK
            ON CHK.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG
            AND CHK.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA
            AND CHK.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
            AND 'CHECK' = TC.CONSTRAINT_TYPE
        LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS REF
            ON REF.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG
            AND REF.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA
            AND REF.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
            AND 'FOREIGN KEY' = TC.CONSTRAINT_TYPE
        LEFT JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE RFD
            ON RFD.CONSTRAINT_CATALOG = REF.UNIQUE_CONSTRAINT_CATALOG
            AND RFD.CONSTRAINT_SCHEMA = REF.UNIQUE_CONSTRAINT_SCHEMA
            AND RFD.CONSTRAINT_NAME = REF.UNIQUE_CONSTRAINT_NAME
        WHERE CC.CONSTRAINT_CATALOG = CC.TABLE_CATALOG
            AND CC.CONSTRAINT_SCHEMA = CC.TABLE_SCHEMA
            AND CC.TABLE_CATALOG = %s
            AND CC.TABLE_SCHEMA = %s
            AND CC.TABLE_NAME = %s
            AND CC.COLUMN_NAME = %s
        """
        db_name = self._get_setting('name')
        schema_name = self._get_schema_name()
        table = self.execute(sql, [db_name, schema_name, table_name, name])
        if just_names:
            return [r[0] for r in table]
        all = {}
        for r in table:
            # ``type`` deliberately shadows the builtin inside this loop.
            cons_name, type = r[:2]
            if type=='PRIMARY KEY' or type=='UNIQUE':
                # PK/UNIQUE may span several columns; fetch the full list.
                cons = all.setdefault(cons_name, (type,[]))
                sql = '''
                SELECT COLUMN_NAME
                FROM INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE RFD
                WHERE RFD.CONSTRAINT_CATALOG = %s
                    AND RFD.CONSTRAINT_SCHEMA = %s
                    AND RFD.TABLE_NAME = %s
                    AND RFD.CONSTRAINT_NAME = %s
                '''
                columns = self.execute(sql, [db_name, schema_name, table_name, cons_name])
                cons[1].extend(col for col, in columns)
            elif type=='CHECK':
                cons = (type, r[2])
            elif type=='FOREIGN KEY':
                if cons_name in all:
                    raise NotImplementedError("Multiple-column foreign keys are not supported")
                else:
                    cons = (type, r[3:6])
            else:
                raise NotImplementedError("Don't know how to handle constraints of type "+ type)
            all[cons_name] = cons
        return all
@invalidate_table_constraints
def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
"""
Alters the given column name so it will match the given field.
Note that conversion between the two by the database must be possible.
Will not automatically add _id by default; to have this behavour, pass
explicit_name=False.
@param table_name: The name of the table to add the column to
@param name: The name of the column to alter
@param field: The new field definition to use
"""
self._fix_field_definition(field)
if not ignore_constraints:
qn = self.quote_name
sch = qn(self._get_schema_name())
tab = qn(table_name)
table = ".".join([sch, tab])
try:
self.delete_foreign_key(table_name, name)
except ValueError:
# no FK constraint on this field. That's OK.
pass
constraints = self._find_constraints_for_column(table_name, name, False)
for constraint in constraints.keys():
params = dict(table_name = table,
constraint_name = qn(constraint))
sql = self.drop_constraint_string % params
self.execute(sql, [])
ret_val = super(DatabaseOperations, self).alter_column(table_name, name, field, explicit_name, ignore_constraints=True)
if not ignore_constraints:
for cname, (ctype,args) in constraints.items():
params = dict(table = table,
constraint = qn(cname))
if ctype=='UNIQUE':
params['columns'] = ", ".join(map(qn,args))
sql = self.create_unique_sql % params
elif ctype=='PRIMARY KEY':
params['columns'] = ", ".join(map(qn,args))
sql = self.create_primary_key_string % params
elif ctype=='FOREIGN KEY':
continue
# Foreign keys taken care of below
#target = "%s.%s(%s)" % tuple(map(qn,args))
#params.update(column = qn(name), target = target)
#sql = self.create_foreign_key_sql % params
elif ctype=='CHECK':
warn(ConstraintDropped("CHECK "+ args, table_name, name))
continue
#TODO: Some check constraints should be restored; but not before the generic
# backend restores them.
#params['check'] = args
#sql = self.create_check_constraint_sql % params
else:
raise NotImplementedError("Don't know how to handle constraints of type "+ type)
self.execute(sql, [])
# Create foreign key if necessary
if field.rel and self.supports_foreign_keys:
self.execute(
self.foreign_key_sql(
table_name,
field.column,
field.rel.to._meta.db_table,
field.rel.to._meta.get_field(field.rel.field_name).column
)
)
model = self.mock_model("FakeModelForIndexCreation", table_name)
for stmt in self._get_connection().creation.sql_indexes_for_field(model, field, no_style()):
self.execute(stmt)
return ret_val
    def _alter_set_defaults(self, field, name, params, sqls):
        "Subcommand of alter_column that sets default values (overrideable)"
        # Historically, we used to set defaults here.
        # But since South 0.8, we don't ever set defaults on alter-column -- we only
        # use database-level defaults as scaffolding when adding columns.
        # However, we still sometimes need to remove defaults in alter-column.
        # NOTE(review): params['table_name'] appears to be passed to
        # quote_name here even though it may already be quoted upstream --
        # confirm against the generic backend.
        table_name = self.quote_name(params['table_name'])
        drop_default = self.drop_column_default_sql(table_name, name)
        if drop_default:
            sqls.append((drop_default, []))
    def _value_to_unquoted_literal(self, field, value):
        """
        Convert a Python value into an unquoted SQL literal string for use in
        MSSQL DDL (quoting is applied separately by _quote_string).
        """
        # Start with the field's own translation
        conn = self._get_connection()
        value = field.get_db_prep_save(value, connection=conn)
        # This is still a Python object -- nobody expects to need a literal.
        if isinstance(value, string_types):
            return smart_text(value)
        elif isinstance(value, (date,time,datetime)):
            # ISO 8601 is accepted by MSSQL for date/time literals.
            return value.isoformat()
        else:
            #TODO: Anybody else needs special translations?
            return str(value)
def _default_value_workaround(self, value):
if isinstance(value, (date,time,datetime)):
return value.isoformat()
else:
return super(DatabaseOperations, self)._default_value_workaround(value)
def _quote_string(self, s):
return "'" + s.replace("'","''") + "'"
    def drop_column_default_sql(self, table_name, name, q_name=None):
        "MSSQL specific drop default, which is a pain"
        # The DEFAULT lives in a separately-named constraint object; look up
        # its generated name via syscolumns before it can be dropped.
        sql = """
        SELECT object_name(cdefault)
        FROM syscolumns
        WHERE id = object_id('%s')
        AND name = '%s'
        """
        cons = self.execute(sql % (table_name, name), [])
        if cons and cons[0] and cons[0][0]:
            # Returns just the fragment; callers prepend "ALTER TABLE ...".
            return "DROP CONSTRAINT %s" % cons[0][0]
        return None
def _fix_field_definition(self, field):
if isinstance(field, (fields.BooleanField, fields.NullBooleanField)):
if field.default == True:
field.default = 1
if field.default == False:
field.default = 0
    # This is copied from South's generic add_column, with two modifications:
    # 1) The sql-server-specific call to _fix_field_definition
    # 2) Removing a default, when needed, by calling drop_default and not the more general alter_column
    @invalidate_table_constraints
    def add_column(self, table_name, name, field, keep_default=False):
        """
        Adds the column 'name' to the table 'table_name'.
        Uses the 'field' paramater, a django.db.models.fields.Field instance,
        to generate the necessary sql

        @param table_name: The name of the table to add the column to
        @param name: The name of the column to add
        @param field: The field to use
        """
        self._fix_field_definition(field)
        sql = self.column_sql(table_name, name, field)
        if sql:
            params = (
                self.quote_name(table_name),
                sql,
            )
            sql = self.add_column_string % params
            self.execute(sql)
            # Now, drop the default if we need to
            if not keep_default and field.default is not None:
                field.default = fields.NOT_PROVIDED
                #self.alter_column(table_name, name, field, explicit_name=False, ignore_constraints=True)
                self.drop_default(table_name, name, field)
@invalidate_table_constraints
def drop_default(self, table_name, name, field):
fragment = self.drop_column_default_sql(table_name, name)
if fragment:
table_name = self.quote_name(table_name)
sql = " ".join(["ALTER TABLE", table_name, fragment])
self.execute(sql)
@invalidate_table_constraints
def create_table(self, table_name, field_defs):
# Tweak stuff as needed
for _, f in field_defs:
self._fix_field_definition(f)
# Run
super(DatabaseOperations, self).create_table(table_name, field_defs)
    def _find_referencing_fks(self, table_name):
        "MSSQL does not support cascading FKs when dropping tables, we need to implement."

        # Returns rows of (referencing_schema, referencing_table, fk_name)
        # for every FK pointing at ``table_name``.
        # FK -- Foreign Keys
        # UCTU -- Unique Constraints Table Usage
        # FKTU -- Foreign Key Table Usage
        # (last two are both really CONSTRAINT_TABLE_USAGE, different join conditions)
        sql = """
        SELECT FKTU.TABLE_SCHEMA as REFING_TABLE_SCHEMA,
               FKTU.TABLE_NAME as REFING_TABLE_NAME,
               FK.[CONSTRAINT_NAME] as FK_NAME
        FROM [INFORMATION_SCHEMA].[REFERENTIAL_CONSTRAINTS] FK
        JOIN [INFORMATION_SCHEMA].[CONSTRAINT_TABLE_USAGE] UCTU
            ON FK.UNIQUE_CONSTRAINT_CATALOG = UCTU.CONSTRAINT_CATALOG and
               FK.UNIQUE_CONSTRAINT_NAME = UCTU.CONSTRAINT_NAME and
               FK.UNIQUE_CONSTRAINT_SCHEMA = UCTU.CONSTRAINT_SCHEMA
        JOIN [INFORMATION_SCHEMA].[CONSTRAINT_TABLE_USAGE] FKTU
            ON FK.CONSTRAINT_CATALOG = FKTU.CONSTRAINT_CATALOG and
               FK.CONSTRAINT_NAME = FKTU.CONSTRAINT_NAME and
               FK.CONSTRAINT_SCHEMA = FKTU.CONSTRAINT_SCHEMA
        WHERE FK.CONSTRAINT_CATALOG = %s
            AND UCTU.TABLE_SCHEMA = %s -- REFD_TABLE_SCHEMA
            AND UCTU.TABLE_NAME = %s -- REFD_TABLE_NAME
        """
        db_name = self._get_setting('name')
        schema_name = self._get_schema_name()
        return self.execute(sql, [db_name, schema_name, table_name])
@invalidate_table_constraints
def delete_table(self, table_name, cascade=True):
"""
Deletes the table 'table_name'.
"""
if cascade:
refing = self._find_referencing_fks(table_name)
for schmea, table, constraint in refing:
table = ".".join(map (self.quote_name, [schmea, table]))
params = dict(table_name = table,
constraint_name = self.quote_name(constraint))
sql = self.drop_constraint_string % params
self.execute(sql, [])
cascade = False
super(DatabaseOperations, self).delete_table(table_name, cascade)
    @copy_column_constraints
    @delete_column_constraints
    def rename_column(self, table_name, old, new):
        """
        Renames the column of 'table_name' from 'old' to 'new'.
        WARNING - This isn't transactional on MSSQL!
        """
        if old == new:
            # No Operation
            return
        # Examples on the MS site show the table name not being quoted...
        params = (table_name, self.quote_name(old), self.quote_name(new))
        self.execute("EXEC sp_rename '%s.%s', %s, 'COLUMN'" % params)
@invalidate_table_constraints
def rename_table(self, old_table_name, table_name):
"""
Renames the table 'old_table_name' to 'table_name'.
WARNING - This isn't transactional on MSSQL!
"""
if old_table_name == table_name:
# No Operation
return
params = (self.quote_name(old_table_name), self.quote_name(table_name))
self.execute('EXEC sp_rename %s, %s' % params)
    def _db_type_for_alter_column(self, field):
        # Positive* fields: use the plain type; the CHECK is handled separately.
        return self._db_positive_type_for_alter_column(DatabaseOperations, field)
    def _alter_add_column_mods(self, field, name, params, sqls):
        # Re-add the positive-value CHECK constraint for Positive* fields.
        return self._alter_add_positive_check(DatabaseOperations, field, name, params, sqls)
    @invalidate_table_constraints
    def delete_foreign_key(self, table_name, column):
        """
        Drop the FK constraint on ``column`` and the non-unique index MSSQL
        keeps alongside it.
        """
        super(DatabaseOperations, self).delete_foreign_key(table_name, column)
        # A FK also implies a non-unique index
        find_index_sql = """
        SELECT i.name -- s.name, t.name, c.name
        FROM sys.tables t
        INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
        INNER JOIN sys.indexes i ON i.object_id = t.object_id
        INNER JOIN sys.index_columns ic ON ic.object_id = t.object_id
                                       AND ic.index_id = i.index_id
        INNER JOIN sys.columns c ON c.object_id = t.object_id
                                AND ic.column_id = c.column_id
        WHERE i.is_unique=0 AND i.is_primary_key=0 AND i.is_unique_constraint=0
        AND s.name = %s
        AND t.name = %s
        AND c.name = %s
        """
        schema = self._get_schema_name()
        indexes = self.execute(find_index_sql, [schema, table_name, column])
        qn = self.quote_name
        for index in (i[0] for i in indexes if i[0]): # "if i[0]" added because an empty name may return
            self.execute("DROP INDEX %s on %s.%s" % (qn(index), qn(schema), qn(table_name) ))

272
south/db/sqlite3.py Normal file
View File

@ -0,0 +1,272 @@
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):

    """
    SQLite3 implementation of database operations.

    Since SQLite's ALTER TABLE is very limited, most schema changes here are
    implemented by rebuilding the table: create a temp table with the new
    schema, copy the rows across, drop the old table, rename the temp table.
    """

    backend_name = "sqlite3"

    # SQLite ignores several constraints. I wish I could.
    supports_foreign_keys = False
    has_check_constraints = False
    has_booleans = False

    def add_column(self, table_name, name, field, *args, **kwds):
        """
        Adds a column.
        """
        # If it's not nullable, and has no default, raise an error (SQLite is picky)
        if (not field.null and
                (not field.has_default() or field.get_default() is None) and
                not field.empty_strings_allowed):
            raise ValueError("You cannot add a null=False column without a default value.")
        # Initialise the field.
        field.set_attributes_from_name(name)
        # We add columns by remaking the table; even though SQLite supports
        # adding columns, it doesn't support adding PRIMARY KEY or UNIQUE cols.
        # We define fields with no default; a default will be used, though, to fill up the remade table
        field_default = None
        if not getattr(field, '_suppress_default', False):
            default = field.get_default()
            if default is not None:
                field_default = "'%s'" % field.get_db_prep_save(default, connection=self._get_connection())
        field._suppress_default = True
        self._remake_table(table_name, added={
            field.column: (self._column_sql_for_create(table_name, name, field, False), field_default)
        })

    def _get_full_table_description(self, connection, cursor, table_name):
        """
        Returns one dict per column of 'table_name', built from
        PRAGMA table_info(); includes the default value and pk flag that
        Django's own introspection does not expose.
        """
        cursor.execute('PRAGMA table_info(%s)' % connection.ops.quote_name(table_name))
        # cid, name, type, notnull, dflt_value, pk
        return [{'name': field[1],
                 'type': field[2],
                 'null_ok': not field[3],
                 'dflt_value': field[4],
                 'pk': field[5]  # undocumented
                 } for field in cursor.fetchall()]

    @generic.invalidate_table_constraints
    def _remake_table(self, table_name, added={}, renames={}, deleted=[], altered={}, primary_key_override=None, uniques_deleted=[]):
        """
        Given a table and three sets of changes (renames, deletes, alters),
        recreates it with the modified schema.

        'added' maps column name -> (column SQL, default literal or None);
        'altered' maps column name -> new column SQL;
        'primary_key_override' forces the named column (or, if True, no
        column) to be the primary key.
        """
        # Dry runs get skipped completely
        if self.dry_run:
            return
        # Temporary table's name
        temp_name = "_south_new_" + table_name
        # Work out the (possibly new) definitions of each column
        definitions = {}
        cursor = self._get_connection().cursor()
        # Get the index descriptions
        indexes = self._get_connection().introspection.get_indexes(cursor, table_name)
        standalone_indexes = self._get_standalone_indexes(table_name)
        # Work out new column defs.
        for column_info in self._get_full_table_description(self._get_connection(), cursor, table_name):
            name = column_info['name']
            if name in deleted:
                continue
            # Get the type, ignoring PRIMARY KEY (we need to be consistent)
            type = column_info['type'].replace("PRIMARY KEY", "")
            # Add on primary key, not null or unique if needed.
            if (primary_key_override and primary_key_override == name) or \
               (not primary_key_override and name in indexes and
                indexes[name]['primary_key']):
                type += " PRIMARY KEY"
            elif not column_info['null_ok']:
                type += " NOT NULL"
            if (name in indexes and indexes[name]['unique'] and
                    name not in uniques_deleted):
                type += " UNIQUE"
            if column_info['dflt_value'] is not None:
                type += " DEFAULT " + column_info['dflt_value']
            # Deal with a rename
            if name in renames:
                name = renames[name]
            # Add to the defs
            definitions[name] = type
        # Add on altered columns
        for name, type in altered.items():
            if (primary_key_override and primary_key_override == name) or \
               (not primary_key_override and name in indexes and
                indexes[name]['primary_key']):
                type += " PRIMARY KEY"
            if (name in indexes and indexes[name]['unique'] and
                    name not in uniques_deleted):
                type += " UNIQUE"
            definitions[name] = type
        # Add on the new columns
        for name, (type, _) in added.items():
            if (primary_key_override and primary_key_override == name):
                type += " PRIMARY KEY"
            definitions[name] = type
        # Alright, Make the table
        self.execute("CREATE TABLE %s (%s)" % (
            self.quote_name(temp_name),
            ", ".join(["%s %s" % (self.quote_name(cname), ctype) for cname, ctype in definitions.items()]),
        ))
        # Copy over the data
        self._copy_data(table_name, temp_name, renames, added)
        # Delete the old table, move our new one over it
        self.delete_table(table_name)
        self.rename_table(temp_name, table_name)
        # Recreate multi-valued indexes
        # We can't do that before since it's impossible to rename indexes
        # and index name scope is global
        self._make_standalone_indexes(table_name, standalone_indexes, renames=renames, deleted=deleted, uniques_deleted=uniques_deleted)
        self.deferred_sql = []  # prevent double indexing

    def _copy_data(self, src, dst, field_renames={}, added={}):
        "Used to copy data into a new table"
        # Make a list of all the fields to select
        cursor = self._get_connection().cursor()
        src_fields = [column_info[0] for column_info in self._get_connection().introspection.get_table_description(cursor, src)]
        dst_fields = [column_info[0] for column_info in self._get_connection().introspection.get_table_description(cursor, dst)]
        src_fields_new = []
        dst_fields_new = []
        for field in src_fields:
            if field in field_renames:
                dst_fields_new.append(self.quote_name(field_renames[field]))
            elif field in dst_fields:
                dst_fields_new.append(self.quote_name(field))
            else:
                # Column was deleted; don't copy it.
                continue
            src_fields_new.append(self.quote_name(field))
        # Newly-added columns are filled from their default literal, if any.
        for field, (_, default) in added.items():
            if default is not None:
                field = self.quote_name(field)
                src_fields_new.append("%s as %s" % (default, field))
                dst_fields_new.append(field)
        # Copy over the data
        self.execute("INSERT INTO %s (%s) SELECT %s FROM %s;" % (
            self.quote_name(dst),
            ', '.join(dst_fields_new),
            ', '.join(src_fields_new),
            self.quote_name(src),
        ))

    def _create_unique(self, table_name, columns):
        # A unique constraint is just a UNIQUE index in SQLite.
        self._create_index(table_name, columns, True)

    def _create_index(self, table_name, columns, unique=False, index_name=None):
        # Default index name mirrors the one South generates elsewhere:
        # "<table>_<col1>__<col2>...".
        if index_name is None:
            index_name = '%s_%s' % (table_name, '__'.join(columns))
        self.execute("CREATE %sINDEX %s ON %s(%s);" % (
            unique and "UNIQUE " or "",
            self.quote_name(index_name),
            self.quote_name(table_name),
            ', '.join(self.quote_name(c) for c in columns),
        ))

    def _get_standalone_indexes(self, table_name):
        """
        Returns (name, columns, unique) tuples for every index on
        'table_name' that exists as a standalone CREATE INDEX statement,
        skipping single-column UNIQUEs that live inline in the CREATE TABLE.
        """
        indexes = []
        cursor = self._get_connection().cursor()
        cursor.execute('PRAGMA index_list(%s)' % self.quote_name(table_name))
        # seq, name, unique
        for index, unique in [(field[1], field[2]) for field in cursor.fetchall()]:
            cursor.execute('PRAGMA index_info(%s)' % self.quote_name(index))
            info = cursor.fetchall()
            if len(info) == 1 and unique:
                # This index is already specified in the CREATE TABLE columns
                # specification
                continue
            columns = []
            for field in info:
                columns.append(field[2])
            indexes.append((index, columns, unique))
        return indexes

    def _make_standalone_indexes(self, table_name, indexes, deleted=[], renames={}, uniques_deleted=[]):
        """
        Recreates the indexes captured by _get_standalone_indexes on the
        rebuilt table, applying renames and dropping indexes whose columns
        were deleted or whose unique constraint was removed.
        """
        for index_name, index, unique in indexes:
            columns = []
            for name in index:
                # Handle deletion
                if name in deleted:
                    columns = []
                    break
                # Handle renames
                if name in renames:
                    name = renames[name]
                columns.append(name)
            if columns and (set(columns) != set(uniques_deleted) or not unique):
                self._create_index(table_name, columns, unique, index_name)

    def _column_sql_for_create(self, table_name, name, field, explicit_name=True):
        "Given a field and its name, returns the full type for the CREATE TABLE (without unique/pk)"
        field.set_attributes_from_name(name)
        if not explicit_name:
            name = field.db_column
        else:
            field.column = name
        sql = self.column_sql(table_name, name, field, with_name=False, field_prepared=True)
        # Remove keywords we don't want (this should be type only, not constraint)
        if sql:
            sql = sql.replace("PRIMARY KEY", "")
        return sql

    def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
        """
        Changes a column's SQL definition.

        Note that this sqlite3 implementation ignores the ignore_constraints argument.
        The argument is accepted for API compatibility with the generic
        DatabaseOperations.alter_column() method.
        """
        # Change nulls to default if needed
        if not field.null and field.has_default():
            params = {
                "column": self.quote_name(name),
                "table_name": self.quote_name(table_name)
            }
            self._update_nulls_to_default(params, field)
        # Remake the table correctly
        field._suppress_default = True
        self._remake_table(table_name, altered={
            name: self._column_sql_for_create(table_name, name, field, explicit_name),
        })

    def delete_column(self, table_name, column_name):
        """
        Deletes a column.
        """
        self._remake_table(table_name, deleted=[column_name])

    def rename_column(self, table_name, old, new):
        """
        Renames a column from one name to another.
        """
        self._remake_table(table_name, renames={old: new})

    def create_unique(self, table_name, columns):
        """
        Create an unique index on columns
        """
        self._create_unique(table_name, columns)

    def delete_unique(self, table_name, columns):
        """
        Delete an unique index
        """
        self._remake_table(table_name, uniques_deleted=columns)

    def create_primary_key(self, table_name, columns):
        # SQLite can only have one (single-column) PK, set via table rebuild.
        if not isinstance(columns, (list, tuple)):
            columns = [columns]
        assert len(columns) == 1, "SQLite backend does not support multi-column primary keys"
        self._remake_table(table_name, primary_key_override=columns[0])

    # Not implemented this yet.
    def delete_primary_key(self, table_name):
        # By passing True in, we make sure we wipe all existing PKs.
        self._remake_table(table_name, primary_key_override=True)

    # No cascades on deletes
    def delete_table(self, table_name, cascade=True):
        # SQLite has no DROP TABLE ... CASCADE, so always pass False down.
        generic.DatabaseOperations.delete_table(self, table_name, False)

160
south/exceptions.py Normal file
View File

@ -0,0 +1,160 @@
from __future__ import print_function
from traceback import format_exception, format_exc
class SouthError(RuntimeError):
    """Base class for every error South raises."""
    pass
class SouthWarning(RuntimeWarning):
    """Base class for every warning South issues."""
    pass
class BrokenMigration(SouthError):
    """Raised when a migration module exists but fails to load."""

    def __init__(self, migration, exc_info):
        self.migration = migration
        self.exc_info = exc_info
        if self.exc_info:
            # A captured (type, value, traceback) triple was supplied.
            self.traceback = ''.join(format_exception(*self.exc_info))
        else:
            try:
                self.traceback = format_exc()
            except AttributeError:
                # Python 3 path: there is no exception currently being handled.
                self.traceback = None

    def __str__(self):
        return "While loading migration '%s':\n%s" % (self.migration, self.traceback)
class UnknownMigration(BrokenMigration):
    """Raised when the named migration cannot be found at all."""

    def __str__(self):
        # __init__ may not have captured a traceback; default to empty.
        if not hasattr(self, "traceback"):
            self.traceback = ""
        return "Migration '%s' probably doesn't exist.\n%s" % (
            self.migration, self.traceback)
class InvalidMigrationModule(SouthError):
    """Raised when the migration module configured for an app cannot be used."""

    def __init__(self, application, module):
        self.application = application
        self.module = module

    def __str__(self):
        return ("The migration module specified for %s, %r, is invalid; "
                "the parent module does not exist." % (self.application, self.module))
class NoMigrations(SouthError):
    """Raised when an application has no migrations."""

    def __init__(self, application):
        self.application = application

    def __str__(self):
        return "Application '%s' has no migrations." % (self.application,)
class MultiplePrefixMatches(SouthError):
    """Raised when a migration-name prefix matches more than one migration."""

    def __init__(self, prefix, matches):
        self.prefix = prefix
        self.matches = matches

    def __str__(self):
        # Kept as an attribute assignment for compatibility with the original.
        self.matches_list = "\n    ".join(str(match) for match in self.matches)
        return ("Prefix '%s' matches more than one migration:\n"
                "    %s" % (self.prefix, self.matches_list))
class GhostMigrations(SouthError):
    """Raised when applied migrations are recorded in the database but missing on disk."""

    def __init__(self, ghosts):
        self.ghosts = ghosts

    def __str__(self):
        self.ghosts_list = "\n    ".join([str(m) for m in self.ghosts])
        return ("\n\n ! These migrations are in the database but not on disk:\n"
                "    %(ghosts_list)s\n"
                " ! I'm not trusting myself; either fix this yourself by fiddling\n"
                " ! with the south_migrationhistory table, or pass --delete-ghost-migrations\n"
                " ! to South to have it delete ALL of these records (this may not be good).") % self.__dict__
class CircularDependency(SouthError):
    """Raised when migration dependencies form a cycle."""

    def __init__(self, trace):
        self.trace = trace

    def __str__(self):
        chain = " -> ".join(str(step) for step in self.trace)
        return "Found circular dependency:\n    %s" % chain
class InconsistentMigrationHistory(SouthError):
    """Raised when the recorded migration history conflicts with dependency order."""

    def __init__(self, problems):
        # List of the conflicting entries; not currently rendered in the message.
        self.problems = problems

    def __str__(self):
        return ('Inconsistent migration history\n'
                'The following options are available:\n'
                '    --merge: will just attempt the migration ignoring any potential dependency conflicts.')
class DependsOnHigherMigration(SouthError):
    """Raised when a migration depends on a later migration of the same app."""

    def __init__(self, migration, depends_on):
        self.migration = migration
        self.depends_on = depends_on

    def __str__(self):
        return ("Lower migration '%s' depends on a higher migration '%s' "
                "in the same app." % (self.migration, self.depends_on))
class DependsOnUnknownMigration(SouthError):
    """Raised when a migration depends on a migration that cannot be found."""

    def __init__(self, migration, depends_on):
        self.migration = migration
        self.depends_on = depends_on

    def __str__(self):
        # BUG FIX: this previously called print() and implicitly returned
        # None, so str(exc) raised "TypeError: __str__ returned non-string
        # (type NoneType)" whenever the error was actually displayed.
        return "Migration '%(migration)s' depends on unknown migration '%(depends_on)s'." % self.__dict__
class DependsOnUnmigratedApplication(SouthError):
    """Raised when a migration depends on an app that South does not manage."""

    def __init__(self, migration, application):
        self.migration = migration
        self.application = application

    def __str__(self):
        return ("Migration '%s' depends on unmigrated application '%s'."
                % (self.migration, self.application))
class FailedDryRun(SouthError):
    """Raised when a migration errors out during its dry run."""

    def __init__(self, migration, exc_info):
        self.migration = migration
        self.name = migration.name()
        self.exc_info = exc_info
        # Render the captured exception for display in __str__.
        self.traceback = ''.join(format_exception(*self.exc_info))

    def __str__(self):
        return (" ! Error found during dry run of '%s'! Aborting.\n%s"
                % (self.name, self.traceback))
class ORMBaseNotIncluded(SouthError):
    """Raised when a frozen model has something in _ormbases which isn't frozen."""
    pass
class UnfreezeMeLater(Exception):
    """An exception, which tells the ORM unfreezer to postpone this model.

    Note: deliberately not a SouthError -- it is control flow, not a failure.
    """
    pass
class ImpossibleORMUnfreeze(SouthError):
    """Raised if the ORM can't manage to unfreeze all the models in a linear fashion."""
    pass
class ConstraintDropped(SouthWarning):
    """Warning issued when South drops a constraint it cannot carry over."""

    def __init__(self, constraint, table, column=None):
        self.table = table
        # Pre-format the optional column as ".<column>" for the message.
        self.column = (".%s" % column) if column else ""
        self.constraint = constraint

    def __str__(self):
        return ("Constraint %s was dropped from %s%s -- was this intended?"
                % (self.constraint, self.table, self.column))

10
south/hacks/__init__.py Normal file
View File

@ -0,0 +1,10 @@
"""
The hacks module encapsulates all the horrible things that play with Django
internals in one, evil place.
This top file will automagically expose the correct Hacks class.
"""
# Currently, these work for 1.0 and 1.1.
from south.hacks.django_1_0 import Hacks
# Module-level singleton used throughout South ("from south.hacks import hacks").
hacks = Hacks()

110
south/hacks/django_1_0.py Normal file
View File

@ -0,0 +1,110 @@
"""
Hacks for the Django 1.0/1.0.2 releases.
"""
import django
from django.conf import settings
from django.db.backends.creation import BaseDatabaseCreation
from django.db.models.loading import cache
from django.core import management
from django.core.management.commands.flush import Command as FlushCommand
from django.utils.datastructures import SortedDict
from south.utils.py3 import string_types
class SkipFlushCommand(FlushCommand):
    """A 'flush' command replacement that does nothing, used while creating test databases."""

    def handle_noargs(self, **options):
        # no-op to avoid calling flush
        return
class Hacks:
    """Encapsulates the Django-internal monkeypatching South needs (1.0-era API)."""

    def set_installed_apps(self, apps):
        """
        Sets Django's INSTALLED_APPS setting to be effectively the list passed in.
        """
        # Make sure it's a list.
        apps = list(apps)
        # Make sure it contains strings
        if apps:
            assert isinstance(apps[0], string_types), "The argument to set_installed_apps must be a list of strings."
        # Monkeypatch in!  The old value is stashed so reset_installed_apps
        # can restore it later.
        settings.INSTALLED_APPS, settings.OLD_INSTALLED_APPS = (
            apps,
            settings.INSTALLED_APPS,
        )
        self._redo_app_cache()

    def reset_installed_apps(self):
        """
        Undoes the effect of set_installed_apps.
        """
        settings.INSTALLED_APPS = settings.OLD_INSTALLED_APPS
        self._redo_app_cache()

    def _redo_app_cache(self):
        """
        Used to repopulate AppCache after fiddling with INSTALLED_APPS.
        """
        # Reset every piece of AppCache state, then force a reload.
        cache.loaded = False
        # Django 1.6 changed 'handled' from a dict to a set.
        cache.handled = set() if django.VERSION >= (1, 6) else {}
        cache.postponed = []
        cache.app_store = SortedDict()
        cache.app_models = SortedDict()
        cache.app_errors = {}
        cache._populate()

    def clear_app_cache(self):
        """
        Clears the contents of AppCache to a blank state, so new models
        from the ORM can be added.
        """
        self.old_app_models, cache.app_models = cache.app_models, {}

    def unclear_app_cache(self):
        """
        Reversed the effects of clear_app_cache.
        """
        cache.app_models = self.old_app_models
        cache._get_models_cache = {}

    def repopulate_app_cache(self):
        """
        Rebuilds AppCache with the real model definitions.
        """
        cache._populate()

    def store_app_cache_state(self):
        # Snapshot AppCache's entire __dict__ so it can be restored verbatim.
        self.stored_app_cache_state = dict(**cache.__dict__)

    def restore_app_cache_state(self):
        # Restore the snapshot taken by store_app_cache_state.
        cache.__dict__ = self.stored_app_cache_state

    def patch_flush_during_test_db_creation(self):
        """
        Patches BaseDatabaseCreation.create_test_db to not flush database
        """
        def patch(f):
            def wrapper(*args, **kwargs):
                # hold onto the original and replace flush command with a no-op
                original_flush_command = management._commands['flush']
                try:
                    management._commands['flush'] = SkipFlushCommand()
                    # run create_test_db
                    return f(*args, **kwargs)
                finally:
                    # unpatch flush back to the original
                    management._commands['flush'] = original_flush_command
            return wrapper

        BaseDatabaseCreation.create_test_db = patch(BaseDatabaseCreation.create_test_db)

View File

@ -0,0 +1,11 @@
# This module contains built-in introspector plugins for various common
# Django apps.
# These imports trigger the lower-down files
import south.introspection_plugins.geodjango
import south.introspection_plugins.django_audit_log
import south.introspection_plugins.django_tagging
import south.introspection_plugins.django_taggit
import south.introspection_plugins.django_objectpermissions
import south.introspection_plugins.annoying_autoonetoone

View File

@ -0,0 +1,11 @@
from django.conf import settings
from south.modelsinspector import add_introspection_rules
# Only register the rule when django-annoying is actually installed.
if 'annoying' in settings.INSTALLED_APPS:
    try:
        from annoying.fields import AutoOneToOneField
    except ImportError:
        # App listed but package missing; nothing to register.
        pass
    else:
        #django-annoying's AutoOneToOneField is essentially a OneToOneField.
        add_introspection_rules([], ["^annoying\.fields\.AutoOneToOneField"])

View File

@ -0,0 +1,30 @@
"""
South introspection rules for django-audit-log
"""
from django.contrib.auth.models import User
from django.conf import settings
from south.modelsinspector import add_introspection_rules
# Only register rules when django-audit-log is actually installed.
if "audit_log" in settings.INSTALLED_APPS:
    try:
        # Try and import the field so we can see if audit_log is available
        from audit_log.models import fields

        # Make sure the `to` and `null` parameters will be ignored
        rules = [(
            (fields.LastUserField,),
            [],
            {
                'to': ['rel.to', {'default': User}],
                'null': ['null', {'default': True}],
            },
        )]

        # Add the rules for the `LastUserField`
        add_introspection_rules(
            rules,
            ['^audit_log\.models\.fields\.LastUserField'],
        )
    except ImportError:
        # App listed but package missing; nothing to register.
        pass

View File

@ -0,0 +1,16 @@
"""
South introspection rules for django-objectpermissions
"""
from django.conf import settings
from south.modelsinspector import add_ignored_fields
# Only register the ignores when django-objectpermissions is installed.
if 'objectpermissions' in settings.INSTALLED_APPS:
    try:
        from objectpermissions.models import UserPermissionRelation, GroupPermissionRelation
    except ImportError:
        # App listed but package missing; nothing to register.
        pass
    else:
        # These relation fields carry no schema of their own; tell South to skip them.
        add_ignored_fields(["^objectpermissions\.models\.UserPermissionRelation",
                            "^objectpermissions\.models\.GroupPermissionRelation"])

View File

@ -0,0 +1,24 @@
from south.modelsinspector import add_introspection_rules
from django.conf import settings
# Only register rules when django-tagging is actually installed.
if "tagging" in settings.INSTALLED_APPS:
    try:
        from tagging.fields import TagField
    except ImportError:
        # App listed but package missing; nothing to register.
        pass
    else:
        # Teach South the TagField keyword defaults so it can freeze the field.
        rules = [
            (
                (TagField, ),
                [],
                {
                    "blank": ["blank", {"default": True}],
                    "max_length": ["max_length", {"default": 255}],
                },
            ),
        ]
        add_introspection_rules(rules, ["^tagging\.fields",])

# django-tagging-autocomplete's field needs no extra keywords.
if "tagging_autocomplete" in settings.INSTALLED_APPS:
    add_introspection_rules([], ["^tagging_autocomplete\.models\.TagAutocompleteField"])

View File

@ -0,0 +1,14 @@
"""
South introspection rules for django-taggit
"""
from django.conf import settings
from south.modelsinspector import add_ignored_fields
# Only register the ignore when django-taggit is actually installed.
if 'taggit' in settings.INSTALLED_APPS:
    try:
        from taggit.managers import TaggableManager
    except ImportError:
        # App listed but package missing; nothing to register.
        pass
    else:
        # TaggableManager is a manager, not a column; tell South to skip it.
        add_ignored_fields(["^taggit\.managers"])

View File

@ -0,0 +1,21 @@
from south.modelsinspector import add_introspection_rules
from django.conf import settings
# Only register rules when django-timezones is actually installed.
if "timezones" in settings.INSTALLED_APPS:
    try:
        from timezones.fields import TimeZoneField
    except ImportError:
        # App listed but package missing; nothing to register.
        pass
    else:
        # Teach South the TimeZoneField keyword defaults so it can freeze the field.
        rules = [
            (
                (TimeZoneField, ),
                [],
                {
                    "blank": ["blank", {"default": True}],
                    "max_length": ["max_length", {"default": 100}],
                },
            ),
        ]
        add_introspection_rules(rules, ["^timezones\.fields",])

View File

@ -0,0 +1,45 @@
"""
GeoDjango introspection rules
"""
import django
from django.conf import settings
from south.modelsinspector import add_introspection_rules
# GeoDjango rules are only needed when django.contrib.gis is enabled.
has_gis = "django.contrib.gis" in settings.INSTALLED_APPS

if has_gis:
    # Alright,import the field
    from django.contrib.gis.db.models.fields import GeometryField
    # Make some introspection rules
    if django.VERSION[0] == 1 and django.VERSION[1] >= 1:
        # Django 1.1's gis module renamed these.
        rules = [
            (
                (GeometryField, ),
                [],
                {
                    "srid": ["srid", {"default": 4326}],
                    "spatial_index": ["spatial_index", {"default": True}],
                    "dim": ["dim", {"default": 2}],
                    "geography": ["geography", {"default": False}],
                },
            ),
        ]
    else:
        # Pre-1.1 attribute names carry a leading underscore.
        rules = [
            (
                (GeometryField, ),
                [],
                {
                    "srid": ["_srid", {"default": 4326}],
                    "spatial_index": ["_index", {"default": True}],
                    "dim": ["_dim", {"default": 2}],
                },
            ),
        ]

    # Install them
    add_introspection_rules(rules, ["^django\.contrib\.gis"])

38
south/logger.py Normal file
View File

@ -0,0 +1,38 @@
import sys
import logging
from django.conf import settings
# Create a dummy handler to use for now.
class NullHandler(logging.Handler):
    """A logging handler that discards every record.

    Local equivalent of logging.NullHandler, which is presumably unavailable
    on the oldest Pythons this file supports -- TODO confirm minimum version.
    """

    def emit(self, record):
        pass
def get_logger():
    "Attach a file handler to the logger if there isn't one already."
    # Both settings are optional; logging only activates when SOUTH_LOGGING_ON
    # is truthy AND SOUTH_LOGGING_FILE names a writable path.
    debug_on = getattr(settings, "SOUTH_LOGGING_ON", False)
    logging_file = getattr(settings, "SOUTH_LOGGING_FILE", False)

    if debug_on:
        if logging_file:
            # < 2 handlers means only the NullHandler is attached yet.
            if len(_logger.handlers) < 2:
                _logger.addHandler(logging.FileHandler(logging_file))
                _logger.setLevel(logging.DEBUG)
        else:
            raise IOError("SOUTH_LOGGING_ON is True. You also need a SOUTH_LOGGING_FILE setting.")

    return _logger
def close_logger():
    "Closes the logger handler for the file, so we can remove the file after a test."
    # BUG FIX: iterate over a *copy* of the handler list. removeHandler()
    # mutates _logger.handlers, and removing entries from the live list while
    # iterating it skips every other handler, leaving file handles open.
    for handler in list(_logger.handlers):
        _logger.removeHandler(handler)
        if isinstance(handler, logging.FileHandler):
            handler.close()
def init_logger():
    "Initialize the south logger"
    logger = logging.getLogger("south")
    # Attach the no-op handler so logging calls are safe even when
    # SOUTH_LOGGING_ON is disabled (avoids "no handlers" warnings).
    logger.addHandler(NullHandler())
    return logger
_logger = init_logger()

View File

View File

@ -0,0 +1,40 @@
# Common framework for syncdb actions
import copy
from django.core import management
from django.conf import settings
# Make sure the template loader cache is fixed _now_ (#448)
import django.template.loaders.app_directories
from south.hacks import hacks
from south.management.commands.syncdb import Command as SyncCommand
class MigrateAndSyncCommand(SyncCommand):
    """Used for situations where "syncdb" is called by test frameworks."""

    # Copy the parent's options and flip the default of --migrate to True,
    # so a plain syncdb invocation also runs migrations.
    option_list = copy.deepcopy(SyncCommand.option_list)
    for opt in option_list:
        if "--migrate" == opt.get_opt_string():
            opt.default = True
            break
def patch_for_test_db_setup():
    """
    Points the 'syncdb' management command at the right implementation for
    test-database creation, honouring the SOUTH_TESTS_MIGRATE setting.
    """
    # Load the commands cache
    management.get_commands()
    # Repoint to the correct version of syncdb
    if hasattr(settings, "SOUTH_TESTS_MIGRATE") and not settings.SOUTH_TESTS_MIGRATE:
        # point at the core syncdb command when creating tests
        # tests should always be up to date with the most recent model structure
        management._commands['syncdb'] = 'django.core'
    else:
        management._commands['syncdb'] = MigrateAndSyncCommand()
        # Avoid flushing data migrations.
        # http://code.djangoproject.com/ticket/14661 introduced change that flushed custom
        # sql during the test database creation (thus flushing the data migrations).
        # we patch flush to be no-op during create_test_db, but still allow flushing
        # after each test for non-transactional backends.
        hacks.patch_flush_during_test_db_creation()

View File

@ -0,0 +1,95 @@
"""
Quick conversion command module.
"""
from __future__ import print_function
from optparse import make_option
import sys
from django.core.management.base import BaseCommand
from django.core.management.color import no_style
from django.conf import settings
from django.db import models
from django.core import management
from django.core.exceptions import ImproperlyConfigured
from south.migration import Migrations
from south.hacks import hacks
from south.exceptions import NoMigrations
class Command(BaseCommand):
    """Converts an app that currently uses syncdb to be managed by South."""

    option_list = BaseCommand.option_list
    # Older Djangos don't define --verbosity on BaseCommand; add it if missing.
    if '--verbosity' not in [opt.get_opt_string() for opt in BaseCommand.option_list]:
        option_list += (
            make_option('--verbosity', action='store', dest='verbosity', default='1',
                        type='choice', choices=['0', '1', '2'],
                        help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
        )
    option_list += (
        make_option('--delete-ghost-migrations', action='store_true', dest='delete_ghosts', default=False,
                    help="Tells South to delete any 'ghost' migrations (ones in the database but not on disk)."),
        make_option('--ignore-ghost-migrations', action='store_true', dest='ignore_ghosts', default=False,
                    help="Tells South to ignore any 'ghost' migrations (ones in the database but not on disk) and continue to apply new migrations."),
    )

    help = "Quickly converts the named application to use South if it is currently using syncdb."

    def handle(self, app=None, *args, **options):
        """
        Creates an initial schemamigration for 'app' and fake-applies it,
        assuming the database already matches the current models.
        """
        # Make sure we have an app
        if not app:
            print("Please specify an app to convert.")
            return
        # See if the app exists
        app = app.split(".")[-1]
        try:
            app_module = models.get_app(app)
        except ImproperlyConfigured:
            print("There is no enabled application matching '%s'." % app)
            return
        # Try to get its list of models
        model_list = models.get_models(app_module)
        if not model_list:
            print("This application has no models; this command is for applications that already have models syncdb'd.")
            print("Make some models, and then use ./manage.py schemamigration %s --initial instead." % app)
            return
        # Ask South if it thinks it's already got migrations
        try:
            Migrations(app)
        except NoMigrations:
            pass
        else:
            print("This application is already managed by South.")
            return
        # Finally! It seems we've got a candidate, so do the two-command trick
        verbosity = int(options.get('verbosity', 0))
        management.call_command("schemamigration", app, initial=True, verbosity=verbosity)
        # Now, we need to re-clean and sanitise appcache
        hacks.clear_app_cache()
        hacks.repopulate_app_cache()
        # And also clear our cached Migration classes
        Migrations._clear_cache()
        # Now, migrate (fake=True records the migration without touching the schema)
        management.call_command(
            "migrate",
            app,
            "0001",
            fake=True,
            verbosity=verbosity,
            ignore_ghosts=options.get("ignore_ghosts", False),
            delete_ghosts=options.get("delete_ghosts", False),
        )

        print()
        print("App '%s' converted. Note that South assumed the application's models matched the database" % app)
        print("(i.e. you haven't changed it since last syncdb); if you have, you should delete the %s/migrations" % app)
        print("directory, revert models.py so it matches the database, and try again.")

View File

@ -0,0 +1,139 @@
"""
Data migration creation command
"""
from __future__ import print_function
import sys
import os
import re
from optparse import make_option
try:
set
except NameError:
from sets import Set as set
from django.core.management.base import BaseCommand
from django.core.management.color import no_style
from django.db import models
from django.conf import settings
from south.migration import Migrations
from south.exceptions import NoMigrations
from south.creator import freezer
class Command(BaseCommand):
    """Creates a skeleton data migration file for an app."""

    option_list = BaseCommand.option_list + (
        make_option('--freeze', action='append', dest='freeze_list', type='string',
                    help='Freeze the specified app(s). Provide an app name with each; use the option multiple times for multiple apps'),
        make_option('--stdout', action='store_true', dest='stdout', default=False,
                    help='Print the migration to stdout instead of writing it to a file.'),
    )
    help = "Creates a new template data migration for the given app"
    usage_str = "Usage: ./manage.py datamigration appname migrationname [--stdout] [--freeze appname]"

    def handle(self, app=None, name="", freeze_list=None, stdout=False, verbosity=1, **options):
        """
        Validates the arguments, renders the migration template and writes it
        to the app's migrations directory (or stdout).
        """
        verbosity = int(verbosity)
        # Any supposed lists that are None become empty lists
        freeze_list = freeze_list or []

        # --stdout means name = -
        if stdout:
            name = "-"

        # Only allow valid names
        if re.search('[^_\w]', name) and name != "-":
            self.error("Migration names should contain only alphanumeric characters and underscores.")

        # If not name, there's an error
        if not name:
            self.error("You must provide a name for this migration.\n" + self.usage_str)

        if not app:
            self.error("You must provide an app to create a migration for.\n" + self.usage_str)

        # Ensure that verbosity is not a string (Python 3)
        try:
            verbosity = int(verbosity)
        except ValueError:
            self.error("Verbosity must be an number.\n" + self.usage_str)

        # Get the Migrations for this app (creating the migrations dir if needed)
        migrations = Migrations(app, force_creation=True, verbose_creation=verbosity > 0)

        # See what filename is next in line. We assume they use numbers.
        new_filename = migrations.next_filename(name)

        # Work out which apps to freeze
        apps_to_freeze = self.calc_frozen_apps(migrations, freeze_list)

        # So, what's in this file, then?
        file_contents = self.get_migration_template() % {
            "frozen_models": freezer.freeze_apps_to_string(apps_to_freeze),
            "complete_apps": apps_to_freeze and "complete_apps = [%s]" % (", ".join(map(repr, apps_to_freeze))) or ""
        }

        # - is a special name which means 'print to stdout'
        if name == "-":
            print(file_contents)
        # Write the migration file if the name isn't -
        else:
            fp = open(os.path.join(migrations.migrations_dir(), new_filename), "w")
            fp.write(file_contents)
            fp.close()
            print("Created %s." % new_filename, file=sys.stderr)

    def calc_frozen_apps(self, migrations, freeze_list):
        """
        Works out, from the current app, settings, and the command line options,
        which apps should be frozen.
        """
        apps_to_freeze = []
        for to_freeze in freeze_list:
            if "." in to_freeze:
                self.error("You cannot freeze %r; you must provide an app label, like 'auth' or 'books'." % to_freeze)
            # Make sure it's a real app
            if not models.get_app(to_freeze):
                self.error("You cannot freeze %r; it's not an installed app." % to_freeze)
            # OK, it's fine
            apps_to_freeze.append(to_freeze)
        if getattr(settings, 'SOUTH_AUTO_FREEZE_APP', True):
            apps_to_freeze.append(migrations.app_label())
        return apps_to_freeze

    def error(self, message, code=1):
        """
        Prints the error, and exits with the given code.
        """
        print(message, file=sys.stderr)
        sys.exit(code)

    def get_migration_template(self):
        # Indirection so subclasses/tests can supply a different template.
        return MIGRATION_TEMPLATE
MIGRATION_TEMPLATE = """# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
# Note: Don't use "from appname.models import ModelName".
# Use orm.ModelName to refer to models in this application,
# and orm['appname.ModelName'] for models in other applications.
def backwards(self, orm):
"Write your backwards methods here."
models = %(frozen_models)s
%(complete_apps)s
symmetrical = True
"""

View File

@ -0,0 +1,63 @@
"""
Outputs a graphviz dot file of the dependencies.
"""
from __future__ import print_function
from optparse import make_option
import re
import textwrap
from django.core.management.base import BaseCommand
from django.core.management.color import no_style
from south.migration import Migrations, all_migrations
class Command(BaseCommand):
    """Management command that dumps the migration dependency graph as GraphViz dot."""

    help = "Outputs a GraphViz dot file of all migration dependencies to stdout."

    def handle(self, **options):
        """
        Print the full migration dependency graph in GraphViz "dot" syntax.

        Each app becomes a subgraph with its own node colour; edges point
        from a dependency to the migration that depends on it.
        """
        # Resolve dependencies
        Migrations.calculate_dependencies()

        colors = ['crimson', 'darkgreen', 'darkgoldenrod', 'navy',
                  'brown', 'darkorange', 'aquamarine', 'blueviolet']
        color_index = 0
        wrapper = textwrap.TextWrapper(width=40)

        print("digraph G {")

        # Group each app in a subgraph
        for migrations in all_migrations():
            print(" subgraph %s {" % migrations.app_label())
            print(" node [color=%s];" % colors[color_index])
            for migration in migrations:
                # Munge the label - text wrap and change _ to spaces
                label = "%s - %s" % (
                    migration.app_label(), migration.name())
                label = re.sub(r"_+", " ", label)
                label = "\\n".join(wrapper.wrap(label))
                print(' "%s.%s" [label="%s"];' % (
                    migration.app_label(), migration.name(), label))
            print(" }")
            color_index = (color_index + 1) % len(colors)

        # For every migration, print its links.
        for migrations in all_migrations():
            for migration in migrations:
                for other in migration.dependencies:
                    # Added weight tends to keep migrations from the same app
                    # in vertical alignment
                    attrs = "[weight=2.0]"
                    # But the more interesting edges are those between apps
                    if other.app_label() != migration.app_label():
                        attrs = "[style=bold]"
                    print(' "%s.%s" -> "%s.%s" %s;' % (
                        other.app_label(), other.name(),
                        migration.app_label(), migration.name(),
                        attrs
                    ))

        # Fixed: removed stray trailing semicolon after print()
        print("}")

View File

@ -0,0 +1,264 @@
"""
Migrate management command.
"""
from __future__ import print_function
import os.path, re, sys
from functools import reduce
from optparse import make_option
from django.core.management.base import BaseCommand
from django.conf import settings
from django.utils.importlib import import_module
from south import migration
from south.migration import Migrations
from south.exceptions import NoMigrations
from south.db import DEFAULT_DB_ALIAS
class Command(BaseCommand):
    """Implements "./manage.py migrate": runs South migrations for one or all apps."""

    # Command-line options accepted in addition to Django's defaults.
    option_list = BaseCommand.option_list + (
        make_option('--all', action='store_true', dest='all_apps', default=False,
            help='Run the specified migration for all apps.'),
        make_option('--list', action='store_true', dest='show_list', default=False,
            help='List migrations noting those that have been applied'),
        make_option('--changes', action='store_true', dest='show_changes', default=False,
            help='List changes for migrations'),
        make_option('--skip', action='store_true', dest='skip', default=False,
            help='Will skip over out-of-order missing migrations'),
        make_option('--merge', action='store_true', dest='merge', default=False,
            help='Will run out-of-order missing migrations as they are - no rollbacks.'),
        make_option('--no-initial-data', action='store_true', dest='no_initial_data', default=False,
            help='Skips loading initial data if specified.'),
        make_option('--fake', action='store_true', dest='fake', default=False,
            help="Pretends to do the migrations, but doesn't actually execute them."),
        make_option('--db-dry-run', action='store_true', dest='db_dry_run', default=False,
            help="Doesn't execute the SQL generated by the db methods, and doesn't store a record that the migration(s) occurred. Useful to test migrations before applying them."),
        make_option('--delete-ghost-migrations', action='store_true', dest='delete_ghosts', default=False,
            help="Tells South to delete any 'ghost' migrations (ones in the database but not on disk)."),
        make_option('--ignore-ghost-migrations', action='store_true', dest='ignore_ghosts', default=False,
            help="Tells South to ignore any 'ghost' migrations (ones in the database but not on disk) and continue to apply new migrations."),
        make_option('--noinput', action='store_false', dest='interactive', default=True,
            help='Tells Django to NOT prompt the user for input of any kind.'),
        make_option('--database', action='store', dest='database',
            default=DEFAULT_DB_ALIAS, help='Nominates a database to synchronize. '
                'Defaults to the "default" database.'),
    )

    # Some Django versions don't define --verbosity on BaseCommand; add it ourselves.
    if '--verbosity' not in [opt.get_opt_string() for opt in BaseCommand.option_list]:
        option_list += (
            make_option('--verbosity', action='store', dest='verbosity', default='1',
                type='choice', choices=['0', '1', '2'],
                help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
        )

    help = "Runs migrations for all apps."
    args = "[appname] [migrationname|zero] [--all] [--list] [--skip] [--merge] [--no-initial-data] [--fake] [--db-dry-run] [--database=dbalias]"

    def handle(self, app=None, target=None, skip=False, merge=False, backwards=False, fake=False, db_dry_run=False, show_list=False, show_changes=False, database=DEFAULT_DB_ALIAS, delete_ghosts=False, ignore_ghosts=False, **options):
        """
        Run (or, with --list/--changes, display) migrations.

        With *app* set, migrates only that app toward *target*; otherwise
        every migrated app is processed.  Exits with status 1 if any app's
        migration run reports failure.
        """

        # NOTE: THIS IS DUPLICATED FROM django.core.management.commands.syncdb
        # This code imports any module named 'management' in INSTALLED_APPS.
        # The 'management' module is the preferred way of listening to post_syncdb
        # signals, and since we're sending those out with create_table migrations,
        # we need apps to behave correctly.
        for app_name in settings.INSTALLED_APPS:
            try:
                import_module('.management', app_name)
            except ImportError as exc:
                msg = exc.args[0]
                # Only swallow "no management module" errors; re-raise real failures.
                if not msg.startswith('No module named') or 'management' not in msg:
                    raise
        # END DJANGO DUPE CODE

        # if all_apps flag is set, shift app over to target
        if options.get('all_apps', False):
            target = app
            app = None

        # Migrate each app
        if app:
            try:
                apps = [Migrations(app)]
            except NoMigrations:
                print("The app '%s' does not appear to use migrations." % app)
                print("./manage.py migrate " + self.args)
                return
        else:
            apps = list(migration.all_migrations())

        # Do we need to show the list of migrations?
        if show_list and apps:
            list_migrations(apps, database, **options)
        if show_changes and apps:
            show_migration_changes(apps)

        if not (show_list or show_changes):
            for app in apps:
                result = migration.migrate_app(
                    app,
                    target_name = target,
                    fake = fake,
                    db_dry_run = db_dry_run,
                    verbosity = int(options.get('verbosity', 0)),
                    interactive = options.get('interactive', True),
                    load_initial_data = not options.get('no_initial_data', False),
                    merge = merge,
                    skip = skip,
                    database = database,
                    delete_ghosts = delete_ghosts,
                    ignore_ghosts = ignore_ghosts,
                )
                if result is False:
                    sys.exit(1) # Migration failed, so the command fails.
def list_migrations(apps, database=DEFAULT_DB_ALIAS, **options):
    """
    Print every known migration for *apps*, marking those already applied.

    *apps* is a list of Migrations instances; extra keyword options are
    forwarded to format_migration_list_item (e.g. verbosity).
    """
    from south.models import MigrationHistory
    history = MigrationHistory.objects.filter(app_name__in=[app.app_label() for app in apps])
    if database != DEFAULT_DB_ALIAS:
        history = history.using(database)
    # Map "app_name.migration" -> MigrationHistory row for quick lookup.
    by_name = dict([('%s.%s' % (record.app_name, record.migration), record) for record in history])
    print()
    for app in apps:
        print(" " + app.app_label())
        for migration in app:
            record = by_name.get(migration.app_label() + "." + migration.name())
            if record is not None:
                print(format_migration_list_item(migration.name(), applied=record.applied, **options))
            else:
                print(format_migration_list_item(migration.name(), applied=False, **options))
    print()
def show_migration_changes(apps):
    """
    Print, per app, a summary of what changed between consecutive migrations.

    Accepts a list of Migrations instances.  A much simpler, less clear, and
    much less robust equivalent: grep "ing " migrations/*.py
    """
    for app in apps:
        print(app.app_label())
        # diff_migrations compares a pair and returns its second argument, so
        # reduce() walks the app's migrations pairwise; the result is unused.
        reduce(diff_migrations, list(app))
def format_migration_list_item(name, applied=True, **options):
    """
    Render one line of the --list output for a single migration.

    An applied migration is shown as " (*) name"; at verbosity >= 2 the
    value of *applied* (its applied timestamp) is appended.  An unapplied
    migration is shown as " ( ) name".
    """
    if applied:
        # Default missing verbosity to 0 so int(None) can't raise TypeError.
        if int(options.get('verbosity', 0)) >= 2:
            return ' (*) %-80s (applied %s)' % (name, applied)
        else:
            return ' (*) %s' % name
    else:
        return ' ( ) %s' % name
def diff_migrations(migration1, migration2):
    """
    Print a human-readable summary of the differences between the frozen
    models of two consecutive migrations: added/removed models, added/removed
    fields, and changed field attributes.

    Returns *migration2* so the function can be chained with reduce().
    """

    def model_name(models, model):
        # Prefer the frozen Meta object_name over the raw dict key.
        return models[model].get('Meta', {}).get('object_name', model)

    def field_name(models, model, field):
        return '%s.%s' % (model_name(models, model), field)

    print(" " + migration2.name())

    # The frozen model definitions stored on each migration class.
    models1 = migration1.migration_class().models
    models2 = migration2.migration_class().models

    # find new models
    for model in models2.keys():
        if not model in models1.keys():
            print(' added model %s' % model_name(models2, model))

    # find removed models
    for model in models1.keys():
        if not model in models2.keys():
            print(' removed model %s' % model_name(models1, model))

    # compare models
    for model in models1:
        if model in models2:
            # find added fields
            for field in models2[model]:
                if not field in models1[model]:
                    print(' added field %s' % field_name(models2, model, field))
            # find removed fields
            for field in models1[model]:
                if not field in models2[model]:
                    print(' removed field %s' % field_name(models1, model, field))
            # compare fields
            for field in models1[model]:
                if field in models2[model]:
                    name = field_name(models1, model, field)
                    # compare field attributes
                    field_value1 = models1[model][field]
                    field_value2 = models2[model][field]
                    # if a field has become a class, or vice versa
                    if type(field_value1) != type(field_value2):
                        print(' type of %s changed from %s to %s' % (
                            name, field_value1, field_value2))
                    # if class
                    elif isinstance(field_value1, dict):
                        # print ' %s is a class' % name
                        pass
                    # else regular field
                    else:
                        # Frozen fields look like (type, positional-arg list,
                        # keyword-attribute dict) triples — unpack both sides.
                        type1, attr_list1, field_attrs1 = models1[model][field]
                        type2, attr_list2, field_attrs2 = models2[model][field]
                        if type1 != type2:
                            print(' %s type changed from %s to %s' % (
                                name, type1, type2))
                        if attr_list1 != []:
                            print(' %s list %s is not []' % (
                                name, attr_list1))
                        if attr_list2 != []:
                            print(' %s list %s is not []' % (
                                name, attr_list2))
                        if attr_list1 != attr_list2:
                            print(' %s list changed from %s to %s' % (
                                name, attr_list1, attr_list2))
                        # find added field attributes
                        for attr in field_attrs2:
                            if not attr in field_attrs1:
                                print(' added %s attribute %s=%s' % (
                                    name, attr, field_attrs2[attr]))
                        # find removed field attributes
                        for attr in field_attrs1:
                            if not attr in field_attrs2:
                                print(' removed attribute %s(%s=%s)' % (
                                    name, attr, field_attrs1[attr]))
                        # compare field attributes
                        for attr in field_attrs1:
                            if attr in field_attrs2:
                                value1 = field_attrs1[attr]
                                value2 = field_attrs2[attr]
                                if value1 != value2:
                                    print(' %s attribute %s changed from %s to %s' % (
                                        name, attr, value1, value2))
    return migration2

View File

@ -0,0 +1,67 @@
from django.core.exceptions import ImproperlyConfigured
from django.core.management import call_command, CommandError
from django.core.management.base import BaseCommand
from django.conf import settings
from django.db.models import loading
from django.test import simple
from south.migration import Migrations
from south.exceptions import NoMigrations
from south.hacks import hacks
class Command(BaseCommand):
    """Implements "./manage.py migrationcheck": detects missing depends_on values."""

    help = "Runs migrations for each app in turn, detecting missing depends_on values."
    usage_str = "Usage: ./manage.py migrationcheck"

    def handle(self, check_app_name=None, **options):
        """
        Migrate each installed app (or only *check_app_name*) into a fresh
        test database; any failure hints at a missing depends_on value in
        that app's migrations.

        Raises CommandError when one or more apps failed to migrate cleanly.
        """
        runner = simple.DjangoTestSuiteRunner(verbosity=0)
        err_msg = "Failed to migrate %s; see output for hints at missing dependencies:\n"
        # Patch flush so test-database creation plays nicely with South.
        hacks.patch_flush_during_test_db_creation()
        failures = 0
        if check_app_name is None:
            app_names = settings.INSTALLED_APPS
        else:
            app_names = [check_app_name]
        for app_name in app_names:
            app_label = app_name.split(".")[-1]
            if app_name == 'south':
                continue

            try:
                Migrations(app_name)
            except (NoMigrations, ImproperlyConfigured):
                # Not a migrated (or even configured) app; nothing to check.
                continue
            app = loading.get_app(app_label)

            verbosity = int(options.get('verbosity', 1))
            if verbosity >= 1:
                self.stderr.write("processing %s\n" % app_name)

            old_config = runner.setup_databases()
            try:
                call_command('migrate', app_label, noinput=True, verbosity=verbosity)
                for model in loading.get_models(app):
                    # Touch each model's default manager; an error here also
                    # indicates a missing dependency.
                    dummy = model._default_manager.exists()
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception as e:
                failures += 1
                if verbosity >= 1:
                    self.stderr.write(err_msg % app_name)
                    self.stderr.write("%s\n" % e)
            finally:
                # Always tear the test database down again.
                runner.teardown_databases(old_config)
        if failures > 0:
            raise CommandError("Missing depends_on found in %s app(s)." % failures)
        self.stderr.write("No missing depends_on found.\n")
#
#for each app:
# start with blank db.
# syncdb only south (and contrib?)
#
# migrate a single app all the way up. any errors is missing depends_on.
# for all models of that app, try the default manager:
# from django.db.models import loading
# for m in loading.get_models(loading.get_app('a')):
# m._default_manager.exists()
# Any error is also a missing depends on.

View File

@ -0,0 +1,229 @@
"""
Startmigration command, version 2.
"""
from __future__ import print_function
import sys
import os
import re
import string
import random
import inspect
from optparse import make_option
try:
set
except NameError:
from sets import Set as set
from django.core.management.base import BaseCommand
from django.core.management.color import no_style
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.conf import settings
from south.migration import Migrations, migrate_app
from south.models import MigrationHistory
from south.exceptions import NoMigrations
from south.creator import changes, actions, freezer
from south.management.commands.datamigration import Command as DataCommand
class Command(DataCommand):
    """Implements "./manage.py schemamigration": writes a new schema migration file."""

    option_list = DataCommand.option_list + (
        make_option('--add-model', action='append', dest='added_model_list', type='string',
            help='Generate a Create Table migration for the specified model. Add multiple models to this migration with subsequent --add-model parameters.'),
        make_option('--add-field', action='append', dest='added_field_list', type='string',
            help='Generate an Add Column migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
        make_option('--add-index', action='append', dest='added_index_list', type='string',
            help='Generate an Add Index migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
        make_option('--initial', action='store_true', dest='initial', default=False,
            help='Generate the initial schema for the app.'),
        make_option('--auto', action='store_true', dest='auto', default=False,
            help='Attempt to automatically detect differences from the last migration.'),
        make_option('--empty', action='store_true', dest='empty', default=False,
            help='Make a blank migration.'),
        make_option('--update', action='store_true', dest='update', default=False,
            help='Update the most recent migration instead of creating a new one. Rollback this migration if it is already applied.'),
    )
    help = "Creates a new template schema migration for the given app"
    usage_str = "Usage: ./manage.py schemamigration appname migrationname [--empty] [--initial] [--auto] [--add-model ModelName] [--add-field ModelName.field_name] [--stdout]"

    def handle(self, app=None, name="", added_model_list=None, added_field_list=None, freeze_list=None, initial=False, auto=False, stdout=False, added_index_list=None, verbosity=1, empty=False, update=False, **options):
        """
        Create (or, with --update, replace) a schema migration for *app*.

        Exactly one change source is used: --auto (diff against the previous
        frozen migration), --initial, explicit --add-* options, or --empty.
        The file is written to the app's migrations directory, or printed to
        stdout when the migration name is "-".
        """

        # Any supposed lists that are None become empty lists
        added_model_list = added_model_list or []
        added_field_list = added_field_list or []
        added_index_list = added_index_list or []
        freeze_list = freeze_list or []

        # --stdout means name = -
        if stdout:
            name = "-"

        # Only allow valid names
        if re.search('[^_\w]', name) and name != "-":
            self.error("Migration names should contain only alphanumeric characters and underscores.")

        # Make sure options are compatable
        if initial and (added_model_list or added_field_list or auto):
            self.error("You cannot use --initial and other options together\n" + self.usage_str)

        if auto and (added_model_list or added_field_list or initial):
            self.error("You cannot use --auto and other options together\n" + self.usage_str)

        if not app:
            self.error("You must provide an app to create a migration for.\n" + self.usage_str)

        # See if the app exists
        app = app.split(".")[-1]
        try:
            app_module = models.get_app(app)
        except ImproperlyConfigured:
            print("There is no enabled application matching '%s'." % app)
            return

        # Get the Migrations for this app (creating the migrations dir if needed)
        migrations = Migrations(app, force_creation=True, verbose_creation=int(verbosity) > 0)

        # What actions do we need to do?
        if auto:
            # Get the old migration
            try:
                # With --update we diff against the migration *before* the one
                # being replaced.
                last_migration = migrations[-2 if update else -1]
            except IndexError:
                self.error("You cannot use --auto on an app with no migrations. Try --initial.")
            # Make sure it has stored models
            if migrations.app_label() not in getattr(last_migration.migration_class(), "complete_apps", []):
                self.error("You cannot use automatic detection, since the previous migration does not have this whole app frozen.\nEither make migrations using '--freeze %s' or set 'SOUTH_AUTO_FREEZE_APP = True' in your settings.py." % migrations.app_label())
            # Alright, construct two model dicts to run the differ on.
            old_defs = dict(
                (k, v) for k, v in last_migration.migration_class().models.items()
                if k.split(".")[0] == migrations.app_label()
            )
            new_defs = dict(
                (k, v) for k, v in freezer.freeze_apps([migrations.app_label()]).items()
                if k.split(".")[0] == migrations.app_label()
            )
            change_source = changes.AutoChanges(
                migrations = migrations,
                old_defs = old_defs,
                old_orm = last_migration.orm(),
                new_defs = new_defs,
            )

        elif initial:
            # Do an initial migration
            change_source = changes.InitialChanges(migrations)

        else:
            # Read the commands manually off of the arguments
            if (added_model_list or added_field_list or added_index_list):
                change_source = changes.ManualChanges(
                    migrations,
                    added_model_list,
                    added_field_list,
                    added_index_list,
                )
            elif empty:
                change_source = None
            else:
                print("You have not passed any of --initial, --auto, --empty, --add-model, --add-field or --add-index.", file=sys.stderr)
                sys.exit(1)

        # Validate this so we can access the last migration without worrying
        if update and not migrations:
            self.error("You cannot use --update on an app with no migrations.")

        # if not name, there's an error
        if not name:
            if change_source:
                name = change_source.suggest_name()
            if update:
                # Re-use the previous migration's name (minus its number prefix).
                name = re.sub(r'^\d{4}_', '', migrations[-1].name())
            if not name:
                self.error("You must provide a name for this migration\n" + self.usage_str)

        # Get the actions, and then insert them into the actions lists
        forwards_actions = []
        backwards_actions = []
        if change_source:
            for action_name, params in change_source.get_changes():
                # Run the correct Action class
                try:
                    action_class = getattr(actions, action_name)
                except AttributeError:
                    raise ValueError("Invalid action name from source: %s" % action_name)
                else:
                    action = action_class(**params)
                    action.add_forwards(forwards_actions)
                    action.add_backwards(backwards_actions)
                    print(action.console_line(), file=sys.stderr)

        # Nowt happen? That's not good for --auto.
        if auto and not forwards_actions:
            self.error("Nothing seems to have changed.")

        # Work out which apps to freeze
        apps_to_freeze = self.calc_frozen_apps(migrations, freeze_list)

        # So, what's in this file, then?
        file_contents = self.get_migration_template() % {
            "forwards": "\n".join(forwards_actions or [" pass"]),
            "backwards": "\n".join(backwards_actions or [" pass"]),
            "frozen_models": freezer.freeze_apps_to_string(apps_to_freeze),
            "complete_apps": apps_to_freeze and "complete_apps = [%s]" % (", ".join(map(repr, apps_to_freeze))) or ""
        }

        # Deal with update mode as late as possible, avoid a rollback as long
        # as something else can go wrong.
        if update:
            last_migration = migrations[-1]
            if MigrationHistory.objects.filter(applied__isnull=False, app_name=app, migration=last_migration.name()):
                print("Migration to be updated, %s, is already applied, rolling it back now..." % last_migration.name(), file=sys.stderr)
                migrate_app(migrations, 'current-1', verbosity=verbosity)
            # Remove the old migration's source (and compiled) file from disk.
            for ext in ('py', 'pyc'):
                old_filename = "%s.%s" % (os.path.join(migrations.migrations_dir(), last_migration.filename), ext)
                if os.path.isfile(old_filename):
                    os.unlink(old_filename)
            migrations.remove(last_migration)

        # See what filename is next in line. We assume they use numbers.
        new_filename = migrations.next_filename(name)

        # - is a special name which means 'print to stdout'
        if name == "-":
            print(file_contents)
        # Write the migration file if the name isn't -
        else:
            fp = open(os.path.join(migrations.migrations_dir(), new_filename), "w")
            fp.write(file_contents)
            fp.close()
            verb = 'Updated' if update else 'Created'
            if empty:
                print("%s %s. You must now edit this migration and add the code for each direction." % (verb, new_filename), file=sys.stderr)
            else:
                print("%s %s. You can now apply this migration with: ./manage.py migrate %s" % (verb, new_filename, app), file=sys.stderr)

    def get_migration_template(self):
        """Return the skeleton text used for new schema migrations (module-level MIGRATION_TEMPLATE)."""
        return MIGRATION_TEMPLATE
MIGRATION_TEMPLATE = """# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
%(forwards)s
def backwards(self, orm):
%(backwards)s
models = %(frozen_models)s
%(complete_apps)s"""

View File

@ -0,0 +1,33 @@
"""
Now-obsolete startmigration command.
"""
from __future__ import print_function
from optparse import make_option
from django.core.management.base import BaseCommand
from django.core.management.color import no_style
class Command(BaseCommand):
    """Obsolete "startmigration" command: accepts the old arguments but only prints a deprecation notice."""

    # Original option set retained; arguments are parsed but otherwise unused.
    option_list = BaseCommand.option_list + (
        make_option('--model', action='append', dest='added_model_list', type='string',
            help='Generate a Create Table migration for the specified model. Add multiple models to this migration with subsequent --add-model parameters.'),
        make_option('--add-field', action='append', dest='added_field_list', type='string',
            help='Generate an Add Column migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
        make_option('--add-index', action='append', dest='added_index_list', type='string',
            help='Generate an Add Index migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
        make_option('--initial', action='store_true', dest='initial', default=False,
            help='Generate the initial schema for the app.'),
        make_option('--auto', action='store_true', dest='auto', default=False,
            help='Attempt to automatically detect differences from the last migration.'),
        make_option('--freeze', action='append', dest='freeze_list', type='string',
            help='Freeze the specified model(s). Pass in either an app name (to freeze the whole app) or a single model, as appname.modelname.'),
        make_option('--stdout', action='store_true', dest='stdout', default=False,
            help='Print the migration to stdout instead of writing it to a file.'),
    )
    help = "Deprecated command"

    def handle(self, app=None, name="", added_model_list=None, added_field_list=None, initial=False, freeze_list=None, auto=False, stdout=False, added_index_list=None, **options):
        # All arguments are accepted but ignored; just point at the replacements.
        print("The 'startmigration' command is now deprecated; please use the new 'schemamigration' and 'datamigration' commands.")

View File

@ -0,0 +1,115 @@
"""
Overridden syncdb command
"""
from __future__ import print_function
import sys
from optparse import make_option
from django.core.management.base import NoArgsCommand, BaseCommand
from django.core.management.color import no_style
from django.utils.datastructures import SortedDict
from django.core.management.commands import syncdb
from django.conf import settings
from django.db import models
from django.db.models.loading import cache
from django.core import management
from south.db import dbs
from south import migration
from south.exceptions import NoMigrations
def get_app_label(app):
    """Return *app*'s module path with the final dotted segment removed (e.g. 'myapp.models' -> 'myapp')."""
    return app.__name__.rpartition('.')[0]
class Command(NoArgsCommand):
    """Overridden syncdb: syncs only unmigrated apps, then optionally migrates the rest."""

    option_list = syncdb.Command.option_list + (
        make_option('--migrate', action='store_true', dest='migrate', default=False,
            help='Tells South to also perform migrations after the sync. Default for during testing, and other internal calls.'),
        make_option('--all', action='store_true', dest='migrate_all', default=False,
            help='Makes syncdb work on all apps, even migrated ones. Be careful!'),
    )
    # Some Django versions don't define --verbosity on the stock command; add it.
    if '--verbosity' not in [opt.get_opt_string() for opt in syncdb.Command.option_list]:
        option_list += (
            make_option('--verbosity', action='store', dest='verbosity', default='1',
                type='choice', choices=['0', '1', '2'],
                help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
        )
    help = "Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created, except those which use migrations."

    def handle_noargs(self, migrate_all=False, **options):
        """
        Run the stock syncdb restricted to apps without migrations, then
        (when the 'migrate' option is set) run 'migrate' for the others.
        With *migrate_all*, every app is treated as unmigrated.
        """
        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        # This is copied from Django, to fix bug #511.
        try:
            from django.utils.importlib import import_module
        except ImportError:
            pass # TODO: Remove, only for Django1.0
        else:
            for app_name in settings.INSTALLED_APPS:
                try:
                    import_module('.management', app_name)
                except ImportError as exc:
                    msg = exc.args[0]
                    # Only swallow "no management module"; re-raise real failures.
                    if not msg.startswith('No module named') or 'management' not in msg:
                        raise

        # Work out what uses migrations and so doesn't need syncing
        apps_needing_sync = []
        apps_migrated = []
        for app in models.get_apps():
            app_label = get_app_label(app)
            if migrate_all:
                apps_needing_sync.append(app_label)
            else:
                try:
                    migrations = migration.Migrations(app_label)
                except NoMigrations:
                    # It needs syncing
                    apps_needing_sync.append(app_label)
                else:
                    # This is a migrated app, leave it
                    apps_migrated.append(app_label)
        verbosity = int(options.get('verbosity', 0))

        # Run syncdb on only the ones needed
        if verbosity:
            print("Syncing...")

        # Temporarily restrict INSTALLED_APPS and the model cache to the
        # unmigrated apps so the stock syncdb only sees those; restored below.
        old_installed, settings.INSTALLED_APPS = settings.INSTALLED_APPS, apps_needing_sync

        old_app_store, cache.app_store = cache.app_store, SortedDict([
            (k, v) for (k, v) in cache.app_store.items()
            if get_app_label(k) in apps_needing_sync
        ])

        # This will allow the setting of the MySQL storage engine, for example.
        for db in dbs.values():
            db.connection_init()

        # OK, run the actual syncdb
        syncdb.Command().execute(**options)

        settings.INSTALLED_APPS = old_installed
        cache.app_store = old_app_store

        # Migrate if needed
        if options.get('migrate', True):
            if verbosity:
                print("Migrating...")
            # convert from store_true to store_false
            options['no_initial_data'] = not options.get('load_initial_data', True)
            management.call_command('migrate', **options)

        # Be obvious about what we did
        if verbosity:
            print("\nSynced:\n > %s" % "\n > ".join(apps_needing_sync))

        if options.get('migrate', True):
            if verbosity:
                print("\nMigrated:\n - %s" % "\n - ".join(apps_migrated))
        else:
            if verbosity:
                print("\nNot synced (use migrations):\n - %s" % "\n - ".join(apps_migrated))
                print("(use ./manage.py migrate to migrate these)")

View File

@ -0,0 +1,8 @@
from django.core.management.commands import test
from south.management.commands import patch_for_test_db_setup
class Command(test.Command):
    """Overridden "test" command that applies South's test-DB setup patch first."""

    def handle(self, *args, **kwargs):
        # Patch test-database creation before delegating to the stock command.
        patch_for_test_db_setup()
        super(Command, self).handle(*args, **kwargs)

View File

@ -0,0 +1,8 @@
from django.core.management.commands import testserver
from south.management.commands import patch_for_test_db_setup
class Command(testserver.Command):
    """Overridden "testserver" command that applies South's test-DB setup patch first."""

    def handle(self, *args, **kwargs):
        # Patch test-database creation before delegating to the stock command.
        patch_for_test_db_setup()
        super(Command, self).handle(*args, **kwargs)

235
south/migration/__init__.py Normal file
View File

@ -0,0 +1,235 @@
"""
Main migration logic.
"""
from __future__ import print_function
import sys
from django.core.exceptions import ImproperlyConfigured
import south.db
from south import exceptions
from south.models import MigrationHistory
from south.db import db, DEFAULT_DB_ALIAS
from south.migration.migrators import (Backwards, Forwards,
DryRunMigrator, FakeMigrator,
LoadInitialDataMigrator)
from south.migration.base import Migration, Migrations
from south.migration.utils import SortedSet
from south.migration.base import all_migrations
from south.signals import pre_migrate, post_migrate
def to_apply(forwards, done):
    """Return the migrations from *forwards*, in order, that are not in *done*."""
    return [migration for migration in forwards if migration not in done]
def to_unapply(backwards, done):
    """Return the migrations from *backwards*, in order, that ARE in *done*."""
    return [migration for migration in backwards if migration in done]
def problems(pending, done):
    """
    Yield (last_applied, unapplied) pairs describing ordering problems.

    Walks *pending* in order, remembering the most recent migration already
    in *done*; every later migration NOT in *done* is yielded alongside it,
    since that means an applied migration follows an unapplied one.
    """
    last = None
    if not pending:
        # Was "raise StopIteration()": inside a generator that becomes a
        # RuntimeError under PEP 479 (Python 3.7+).  A plain return ends
        # the generator safely with identical semantics.
        return
    for migration in pending:
        if migration in done:
            last = migration
            continue
        if last and migration not in done:
            yield last, migration
def forwards_problems(pending, done, verbosity):
    """
    Takes the list of linearised pending migrations, and the set of done ones,
    and returns the list of problems, if any.
    """
    # Scan the pending list from the far end backwards for suspect pairs.
    candidates = problems(reversed(pending), done)
    return inner_problem_check(candidates, done, verbosity)
def backwards_problems(pending, done, verbosity):
    """Like forwards_problems, but scans the pending list in forward order."""
    candidates = problems(pending, done)
    return inner_problem_check(candidates, done, verbosity)
def inner_problem_check(problems, done, verbosity):
    """
    Take the candidate (last_applied, unapplied) pairs and return the real
    issues: every dependency of an applied migration that was never applied.
    """
    issues = []
    for last, _migration in problems:
        seen = set()
        # Depth-first walk over the dependency graph of the last applied
        # migration, looking for anything it needs that is not in *done*.
        stack = list(last.dependencies)
        while stack:
            dep = stack.pop()
            if dep in seen:
                continue
            seen.add(dep)
            if dep in done:
                # Applied: keep walking its own dependencies.
                stack.extend(dep.dependencies)
            else:
                # That's bad. Error.
                if verbosity:
                    print((" ! Migration %s should not have been applied "
                        "before %s but was." % (last, dep)))
                issues.append((last, dep))
    return issues
def check_migration_histories(histories, delete_ghosts=False, ignore_ghosts=False):
    """
    Checks that there's no 'ghost' migrations in the database.

    A ghost is a MigrationHistory row whose migration can no longer be
    loaded.  Ghosts are deleted when *delete_ghosts* is set, tolerated when
    *ignore_ghosts* is set, and otherwise raise GhostMigrations.  Returns a
    SortedSet of the Migration objects that do exist.
    """
    exists = SortedSet()
    ghosts = []
    for h in histories:
        try:
            m = h.get_migration()
            # Force the migration module to load; a missing one raises here.
            m.migration()
        except exceptions.UnknownMigration:
            ghosts.append(h)
        except ImproperlyConfigured:
            pass # Ignore missing applications
        else:
            exists.add(m)
    if ghosts:
        # They may want us to delete ghosts.
        if delete_ghosts:
            for h in ghosts:
                h.delete()
        elif not ignore_ghosts:
            raise exceptions.GhostMigrations(ghosts)
    return exists
def get_dependencies(target, migrations):
    """
    Return (forwards, backwards) plan *callables* for reaching *target*.

    Directions that don't apply stay bound to the builtin ``list``, so the
    caller can invoke either unconditionally and get an empty plan.
    """
    forwards = list
    backwards = list
    if target is None:
        # No target means unapply everything: walk back from the first migration.
        backwards = migrations[0].backwards_plan
    else:
        forwards = target.forwards_plan
        # When migrating backwards we want to remove up to and
        # including the next migration up in this app (not the next
        # one, that includes other apps)
        migration_before_here = target.next()
        if migration_before_here:
            backwards = migration_before_here.backwards_plan
    return forwards, backwards
def get_direction(target, applied, migrations, verbosity, interactive):
    """
    Decide whether reaching *target* requires migrating forwards or backwards.

    Returns (direction, problems, workplan): *direction* is a Forwards or
    Backwards migrator (or None when nothing needs doing), *problems* is a
    list of inconsistent-history pairs (or None), and *workplan* the ordered
    migrations to run.
    """
    # Get the forwards and reverse dependencies for this target
    forwards, backwards = get_dependencies(target, migrations)
    # Is the whole forward branch applied?
    problems = None
    forwards = forwards()
    workplan = to_apply(forwards, applied)
    if not workplan:
        # If they're all applied, we only know it's not backwards
        direction = None
    else:
        # If the remaining migrations are strictly a right segment of
        # the forwards trace, we just need to go forwards to our
        # target (and check for badness)
        problems = forwards_problems(forwards, applied, verbosity)
        direction = Forwards(verbosity=verbosity, interactive=interactive)
    if not problems:
        # What about the whole backward trace then?
        backwards = backwards()
        missing_backwards = to_apply(backwards, applied)
        if missing_backwards != backwards:
            # If what's missing is a strict left segment of backwards (i.e.
            # all the higher migrations) then we need to go backwards
            workplan = to_unapply(backwards, applied)
            problems = backwards_problems(backwards, applied, verbosity)
            direction = Backwards(verbosity=verbosity, interactive=interactive)
    return direction, problems, workplan
def get_migrator(direction, db_dry_run, fake, load_initial_data):
    """
    Wrap the base *direction* migrator according to the run mode.

    Dry-run takes precedence over fake, which takes precedence over
    initial-data loading; a falsy direction is returned untouched.
    """
    if not direction:
        return direction
    if db_dry_run:
        return DryRunMigrator(migrator=direction, ignore_fail=False)
    if fake:
        return FakeMigrator(migrator=direction)
    if load_initial_data:
        return LoadInitialDataMigrator(migrator=direction)
    return direction
def get_unapplied_migrations(migrations, applied_migrations):
    """
    Yield each migration in ``migrations`` (in order) that is not in
    ``applied_migrations``.

    ``applied_migrations`` is an iterable of history records carrying
    ``app_name`` and ``migration`` attributes; matching is done on the
    "app_label.name" pair.
    """
    seen = set("%s.%s" % (record.app_name, record.migration)
               for record in applied_migrations)
    for candidate in migrations:
        key = "%s.%s" % (candidate.app_label(), candidate.name())
        if key not in seen:
            yield candidate
def migrate_app(migrations, target_name=None, merge=False, fake=False, db_dry_run=False, yes=False, verbosity=0, load_initial_data=False, skip=False, database=DEFAULT_DB_ALIAS, delete_ghosts=False, ignore_ghosts=False, interactive=False):
    """
    Migrate the app owning `migrations` to the migration named `target_name`.

    `target_name` may be None (migrate to the latest), 'zero', a (prefix
    of a) migration name, or the pseudo-targets 'current-1'/'current+1'.
    Fires the pre_migrate/post_migrate signals around the run and honours
    the fake / dry-run / initial-data run modes.  Raises
    InconsistentMigrationHistory when problems are found and neither
    `merge` nor `skip` is set.
    """
    app_label = migrations.app_label()
    verbosity = int(verbosity)
    # Fire off the pre-migrate signal
    # BUGFIX: pass the interactive flag, not verbosity, as `interactive`.
    pre_migrate.send(None, app=app_label, verbosity=verbosity, interactive=interactive, db=database)
    # If there aren't any, quit quizzically
    if not migrations:
        print("? You have no migrations for the '%s' app. You might want some." % app_label)
        return
    # Load the entire dependency graph
    Migrations.calculate_dependencies()
    # Check there's no strange ones in the database
    applied_all = MigrationHistory.objects.filter(applied__isnull=False).order_by('applied').using(database)
    applied = applied_all.filter(app_name=app_label).using(database)
    # Point south.db at the right database and drop cached migration
    # modules so they re-import against it.
    south.db.db = south.db.dbs[database]
    Migrations.invalidate_all_modules()
    south.db.db.debug = (verbosity > 1)
    if target_name == 'current-1':
        # Step one back from the latest applied migration of this app.
        if applied.count() > 1:
            previous_migration = applied[applied.count() - 2]
            if verbosity:
                print('previous_migration: %s (applied: %s)' % (previous_migration.migration, previous_migration.applied))
            target_name = previous_migration.migration
        else:
            if verbosity:
                print('previous_migration: zero')
            target_name = 'zero'
    elif target_name == 'current+1':
        try:
            # BUGFIX: use the next() builtin - generators have no .next()
            # method on Python 3.
            first_unapplied_migration = next(get_unapplied_migrations(migrations, applied))
            target_name = first_unapplied_migration.name()
        except StopIteration:
            # Everything is applied already; fall through to "latest".
            target_name = None
    applied_all = check_migration_histories(applied_all, delete_ghosts, ignore_ghosts)
    # Guess the target_name
    target = migrations.guess_migration(target_name)
    if verbosity:
        if target_name not in ('zero', None) and target.name() != target_name:
            print(" - Soft matched migration %s to %s." % (target_name,
                                                           target.name()))
        print("Running migrations for %s:" % app_label)
    # Get the forwards and reverse dependencies for this target
    direction, problems, workplan = get_direction(target, applied_all, migrations,
                                                  verbosity, interactive)
    if problems and not (merge or skip):
        raise exceptions.InconsistentMigrationHistory(problems)
    # Perform the migration
    migrator = get_migrator(direction, db_dry_run, fake, load_initial_data)
    if migrator:
        migrator.print_title(target)
        success = migrator.migrate_many(target, workplan, database)
        # Finally, fire off the post-migrate signal
        if success:
            post_migrate.send(None, app=app_label, verbosity=verbosity, interactive=interactive, db=database)
    else:
        if verbosity:
            # Say there's nothing.
            print('- Nothing to migrate.')
        # If we have initial data enabled, and we're at the most recent
        # migration, do initial data.
        # Note: We use a fake Forwards() migrator here. It's never used really.
        if load_initial_data:
            migrator = LoadInitialDataMigrator(migrator=Forwards(verbosity=verbosity))
            migrator.load_initial_data(target, db=database)
        # Send signal.
        post_migrate.send(None, app=app_label, verbosity=verbosity, interactive=interactive, db=database)

451
south/migration/base.py Normal file
View File

@ -0,0 +1,451 @@
from __future__ import print_function
from collections import deque
import datetime
from imp import reload
import os
import re
import sys
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.conf import settings
from django.utils import importlib
from south import exceptions
from south.migration.utils import depends, dfs, flatten, get_app_label
from south.orm import FakeORM
from south.utils import memoize, ask_for_it_by_name, datetime_utils
from south.migration.utils import app_label_to_app_module
from south.utils.py3 import string_types, with_metaclass
def all_migrations(applications=None):
    """
    Yield a Migrations object for every migrated app.

    `applications` is an iterable of *models* modules (as returned by
    ``models.get_apps()``, which is the default); apps that have no
    migrations package are silently skipped.
    """
    modules = models.get_apps() if applications is None else applications
    for models_module in modules:
        # Each entry is the app's models module - strip the trailing
        # component to get the app package itself.
        package_path = ".".join(models_module.__name__.split(".")[:-1])
        application = ask_for_it_by_name(package_path)
        try:
            yield Migrations(application)
        except exceptions.NoMigrations:
            # Not a migrated app; ignore it.
            continue
def application_to_app_label(application):
    """
    Work out the app label from either an app label, a dotted app name,
    or the app's module object.
    """
    if isinstance(application, string_types):
        dotted = application
    else:
        dotted = application.__name__
    # The label is always the final dotted component.
    return dotted.rsplit('.', 1)[-1]
class MigrationsMetaclass(type):
    """
    Metaclass which interns Migrations instances: at most one instance
    exists per app label, shared by every lookup for that app.
    """

    def __init__(self, name, bases, namespace):
        super(MigrationsMetaclass, self).__init__(name, bases, namespace)
        # Maps app label -> the one-and-only Migrations for that app.
        self.instances = {}

    def __call__(self, application, **kwds):
        label = application_to_app_label(application)
        try:
            return self.instances[label]
        except KeyError:
            # First request for this app: build and cache the instance,
            # always constructing from the app's module object.
            instance = super(MigrationsMetaclass, self).__call__(
                app_label_to_app_module(label), **kwds)
            self.instances[label] = instance
            return instance

    def _clear_cache(self):
        "Forgets all cached Migrations instances."
        self.instances = {}
class Migrations(with_metaclass(MigrationsMetaclass, list)):
    """
    Holds a list of Migration objects for a particular app.

    Behaves as a list of Migration instances in filename order; the
    metaclass guarantees one shared instance per app label.
    """

    # Pattern for filenames/dirnames inside the migrations directory that
    # count as migrations.  With SOUTH_USE_PYC set, .pyc files are
    # accepted in addition to .py files and package directories.
    if getattr(settings, "SOUTH_USE_PYC", False):
        MIGRATION_FILENAME = re.compile(r'(?!__init__)' # Don't match __init__.py
                                        r'[0-9a-zA-Z_]*' # Don't match dotfiles, or names with dots/invalid chars in them
                                        r'(\.pyc?)?$') # Match .py or .pyc files, or module dirs
    else:
        MIGRATION_FILENAME = re.compile(r'(?!__init__)' # Don't match __init__.py
                                        r'[0-9a-zA-Z_]*' # Don't match dotfiles, or names with dots/invalid chars in them
                                        r'(\.py)?$') # Match only .py files, or module dirs

    def __init__(self, application, force_creation=False, verbose_creation=True):
        "Constructor. Takes the module of the app, NOT its models (like get_app returns)"
        self._cache = {}  # migration name -> Migration instance, see migration()
        self.set_application(application, force_creation, verbose_creation)

    def create_migrations_directory(self, verbose=True):
        "Given an application, ensures that the migrations directory is ready."
        migrations_dir = self.migrations_dir()
        # Make the directory if it's not already there
        if not os.path.isdir(migrations_dir):
            if verbose:
                print("Creating migrations directory at '%s'..." % migrations_dir)
            os.mkdir(migrations_dir)
        # Same for __init__.py
        init_path = os.path.join(migrations_dir, "__init__.py")
        if not os.path.isfile(init_path):
            # Touch the init py file
            if verbose:
                print("Creating __init__.py in '%s'..." % migrations_dir)
            open(init_path, "w").close()

    def migrations_dir(self):
        """
        Returns the full path of the migrations directory.
        If it doesn't exist yet, returns where it would exist, based on the
        app's migrations module (defaults to app.migrations)
        """
        module_path = self.migrations_module()
        try:
            module = importlib.import_module(module_path)
        except ImportError:
            # There's no migrations module made yet; guess!
            try:
                parent = importlib.import_module(".".join(module_path.split(".")[:-1]))
            except ImportError:
                # The parent doesn't even exist, that's an issue.
                raise exceptions.InvalidMigrationModule(
                    application = self.application.__name__,
                    module = module_path,
                )
            else:
                # Good guess.
                return os.path.join(os.path.dirname(parent.__file__), module_path.split(".")[-1])
        else:
            # Get directory directly
            return os.path.dirname(module.__file__)

    def migrations_module(self):
        "Returns the module name of the migrations module for this"
        app_label = application_to_app_label(self.application)
        # A SOUTH_MIGRATION_MODULES setting entry overrides everything.
        if hasattr(settings, "SOUTH_MIGRATION_MODULES"):
            if app_label in settings.SOUTH_MIGRATION_MODULES:
                # There's an override.
                return settings.SOUTH_MIGRATION_MODULES[app_label]
        # We see if the south_migrations module exists first, and
        # use that if we find it.
        module_name = self._application.__name__ + '.south_migrations'
        try:
            importlib.import_module(module_name)
        except ImportError:
            return self._application.__name__ + '.migrations'
        else:
            return module_name

    def get_application(self):
        return self._application

    def set_application(self, application, force_creation=False, verbose_creation=True):
        """
        Called when the application for this Migrations is set.
        Imports the migrations module object, and throws a paddy if it can't.
        With force_creation, an empty migrations package is created first.
        """
        self._application = application
        if not hasattr(application, 'migrations') and not hasattr(application, 'south_migrations'):
            try:
                module = importlib.import_module(self.migrations_module())
                self._migrations = application.migrations = module
            except ImportError:
                if force_creation:
                    self.create_migrations_directory(verbose_creation)
                    module = importlib.import_module(self.migrations_module())
                    self._migrations = application.migrations = module
                else:
                    raise exceptions.NoMigrations(application)
        # south_migrations takes precedence over a plain migrations package.
        if hasattr(application, 'south_migrations'):
            self._load_migrations_module(application.south_migrations)
        else:
            self._load_migrations_module(application.migrations)

    application = property(get_application, set_application)

    def _load_migrations_module(self, module):
        # Scan the migrations package directory and populate this list
        # with one Migration per matching file/dir, sorted by filename.
        self._migrations = module
        filenames = []
        dirname = self.migrations_dir()
        for f in os.listdir(dirname):
            if self.MIGRATION_FILENAME.match(os.path.basename(f)):
                full_path = os.path.join(dirname, f)
                # If it's a .pyc file, only append if the .py isn't already around
                if f.endswith(".pyc") and (os.path.isfile(full_path[:-1])):
                    continue
                # If it's a module directory, only append if it contains __init__.py[c].
                if os.path.isdir(full_path):
                    if not (os.path.isfile(os.path.join(full_path, "__init__.py")) or \
                       (getattr(settings, "SOUTH_USE_PYC", False) and \
                        os.path.isfile(os.path.join(full_path, "__init__.pyc")))):
                        continue
                filenames.append(f)
        filenames.sort()
        self.extend(self.migration(f) for f in filenames)

    def migration(self, filename):
        # Cached Migration factory, keyed on the stripped migration name.
        name = Migration.strip_filename(filename)
        if name not in self._cache:
            self._cache[name] = Migration(self, name)
        return self._cache[name]

    def __getitem__(self, value):
        # Allow lookup by migration name as well as by list index.
        if isinstance(value, string_types):
            return self.migration(value)
        return super(Migrations, self).__getitem__(value)

    def _guess_migration(self, prefix):
        # Resolve a unique name prefix to its Migration, or raise.
        prefix = Migration.strip_filename(prefix)
        matches = [m for m in self if m.name().startswith(prefix)]
        if len(matches) == 1:
            return matches[0]
        elif len(matches) > 1:
            raise exceptions.MultiplePrefixMatches(prefix, matches)
        else:
            raise exceptions.UnknownMigration(prefix, None)

    def guess_migration(self, target_name):
        """
        Resolve a user-supplied target: 'zero' (or an app with no
        migrations) gives None, None gives the latest migration, and
        anything else is treated as a name prefix.
        """
        if target_name == 'zero' or not self:
            return
        elif target_name is None:
            return self[-1]
        else:
            return self._guess_migration(prefix=target_name)

    def app_label(self):
        return self._application.__name__.split('.')[-1]

    def full_name(self):
        # Dotted module path of the migrations package.
        return self._migrations.__name__

    @classmethod
    def calculate_dependencies(cls, force=False):
        "Goes through all the migrations, and works out the dependencies."
        # Only ever done once per process unless force is set.
        if getattr(cls, "_dependencies_done", False) and not force:
            return
        for migrations in all_migrations():
            for migration in migrations:
                migration.calculate_dependencies()
        cls._dependencies_done = True

    @staticmethod
    def invalidate_all_modules():
        "Goes through all the migrations, and invalidates all cached modules."
        for migrations in all_migrations():
            for migration in migrations:
                migration.invalidate_module()

    def next_filename(self, name):
        "Returns the fully-formatted filename of what a new migration 'name' would be"
        highest_number = 0
        for migration in self:
            try:
                number = int(migration.name().split("_")[0])
                highest_number = max(highest_number, number)
            except ValueError:
                # Name doesn't start with a number; ignore for numbering.
                pass
        # Work out the new filename
        return "%04i_%s.py" % (
            highest_number + 1,
            name,
        )
class Migration(object):
    """
    Class which represents a particular migration file on-disk.
    """

    def __init__(self, migrations, filename):
        """
        Returns the migration class implied by 'filename'.
        """
        self.migrations = migrations  # owning Migrations list for the app
        self.filename = filename
        # Populated later by calculate_dependencies():
        self.dependencies = set()  # migrations that must run before this one
        self.dependents = set()    # migrations that require this one first

    def __str__(self):
        return self.app_label() + ':' + self.name()

    def __repr__(self):
        return '<Migration: %s>' % str(self)

    def __eq__(self, other):
        # Identity is the (app label, name) pair; consistent with __hash__.
        return self.app_label() == other.app_label() and self.name() == other.name()

    def __hash__(self):
        return hash(str(self))

    def app_label(self):
        return self.migrations.app_label()

    @staticmethod
    def strip_filename(filename):
        # "0001_initial.py" -> "0001_initial"
        return os.path.splitext(os.path.basename(filename))[0]

    def name(self):
        return self.strip_filename(os.path.basename(self.filename))

    def full_name(self):
        # Dotted module path of this migration file.
        return self.migrations.full_name() + '.' + self.name()

    def migration(self):
        "Tries to load the actual migration module"
        full_name = self.full_name()
        try:
            migration = sys.modules[full_name]
        except KeyError:
            try:
                migration = __import__(full_name, {}, {}, ['Migration'])
            except ImportError as e:
                raise exceptions.UnknownMigration(self, sys.exc_info())
            except Exception as e:
                raise exceptions.BrokenMigration(self, sys.exc_info())
        # Override some imports
        migration._ = lambda x: x # Fake i18n
        migration.datetime = datetime_utils  # south's datetime replacement
        return migration
    migration = memoize(migration)

    def migration_class(self):
        "Returns the Migration class from the module"
        return self.migration().Migration

    def migration_instance(self):
        "Instantiates the migration_class"
        return self.migration_class()()
    migration_instance = memoize(migration_instance)

    def previous(self):
        "Returns the migration that comes before this one in the sequence."
        index = self.migrations.index(self) - 1
        if index < 0:
            return None
        return self.migrations[index]
    previous = memoize(previous)

    def next(self):
        "Returns the migration that comes after this one in the sequence."
        index = self.migrations.index(self) + 1
        if index >= len(self.migrations):
            return None
        return self.migrations[index]
    next = memoize(next)

    def _get_dependency_objects(self, attrname):
        """
        Given the name of an attribute (depends_on or needed_by), either yields
        a list of migration objects representing it, or errors out.
        """
        for app, name in getattr(self.migration_class(), attrname, []):
            try:
                migrations = Migrations(app)
            except ImproperlyConfigured:
                raise exceptions.DependsOnUnmigratedApplication(self, app)
            migration = migrations.migration(name)
            try:
                migration.migration()
            except exceptions.UnknownMigration:
                raise exceptions.DependsOnUnknownMigration(self, migration)
            # is_before() returns None for cross-app pairs, so this
            # explicit == False only rejects same-app forward references.
            if migration.is_before(self) == False:
                raise exceptions.DependsOnHigherMigration(self, migration)
            yield migration

    def calculate_dependencies(self):
        """
        Loads dependency info for this migration, and stores it in itself
        and any other relevant migrations.
        """
        # Normal deps first
        for migration in self._get_dependency_objects("depends_on"):
            self.dependencies.add(migration)
            migration.dependents.add(self)
        # And reverse deps
        for migration in self._get_dependency_objects("needed_by"):
            self.dependents.add(migration)
            migration.dependencies.add(self)
        # And implicit ordering deps
        previous = self.previous()
        if previous:
            self.dependencies.add(previous)
            previous.dependents.add(self)

    def invalidate_module(self):
        """
        Removes the cached version of this migration's module import, so we
        have to re-import it. Used when south.db.db changes.
        """
        reload(self.migration())
        self.migration._invalidate()

    def forwards(self):
        return self.migration_instance().forwards

    def backwards(self):
        return self.migration_instance().backwards

    def forwards_plan(self):
        """
        Returns a list of Migration objects to be applied, in order.
        This list includes `self`, which will be applied last.
        """
        return depends(self, lambda x: x.dependencies)

    def _backwards_plan(self):
        return depends(self, lambda x: x.dependents)

    def backwards_plan(self):
        """
        Returns a list of Migration objects to be unapplied, in order.
        This list includes `self`, which will be unapplied last.
        """
        return list(self._backwards_plan())

    def is_before(self, other):
        # Only comparable within the same app; implicitly returns None
        # when the two migrations belong to different Migrations lists.
        if self.migrations == other.migrations:
            if self.filename < other.filename:
                return True
            return False

    def is_after(self, other):
        # Mirror of is_before(); None for cross-app comparisons.
        if self.migrations == other.migrations:
            if self.filename > other.filename:
                return True
            return False

    def prev_orm(self):
        if getattr(self.migration_class(), 'symmetrical', False):
            # Symmetrical migrations use the same ORM for both directions.
            return self.orm()
        previous = self.previous()
        if previous is None:
            # First migration? The 'previous ORM' is empty.
            return FakeORM(None, self.app_label())
        return previous.orm()
    prev_orm = memoize(prev_orm)

    def orm(self):
        # Frozen ORM built from this migration's class.
        return FakeORM(self.migration_class(), self.app_label())
    orm = memoize(orm)

    def no_dry_run(self):
        # Migrations may set no_dry_run = True to opt out of dry runs.
        migration_class = self.migration_class()
        try:
            return migration_class.no_dry_run
        except AttributeError:
            return False

View File

@ -0,0 +1,381 @@
from __future__ import print_function
from copy import copy, deepcopy
import datetime
import inspect
import sys
import traceback
from django.core.management import call_command
from django.core.management.commands import loaddata
from django.db import models
from django import VERSION as DJANGO_VERSION
import south.db
from south import exceptions
from south.db import DEFAULT_DB_ALIAS
from south.models import MigrationHistory
from south.signals import ran_migration
from south.utils.py3 import StringIO, iteritems
class Migrator(object):
    """
    Base class for things that run a sequence of migrations in one
    direction.  Subclasses (Forwards/Backwards) supply title/status/orm/
    direction/record; MigratorWrapper subclasses decorate an instance.
    """

    def __init__(self, verbosity=0, interactive=False):
        self.verbosity = int(verbosity)
        self.interactive = bool(interactive)

    @staticmethod
    def title(target):
        # Headline printed before a run; direction-specific.
        raise NotImplementedError()

    def print_title(self, target):
        if self.verbosity:
            print(self.title(target))

    @staticmethod
    def status(target):
        # One-line progress marker for a single migration.
        raise NotImplementedError()

    def print_status(self, migration):
        status = self.status(migration)
        if self.verbosity and status:
            print(status)

    @staticmethod
    def orm(migration):
        # Which frozen ORM this migration's function should receive.
        raise NotImplementedError()

    def backwards(self, migration):
        return self._wrap_direction(migration.backwards(), migration.prev_orm())

    def direction(self, migration):
        # Returns the zero-argument callable to execute for `migration`.
        raise NotImplementedError()

    @staticmethod
    def _wrap_direction(direction, orm):
        # Inspect the migration function's signature to decide whether it
        # takes an ORM argument (new-style) or not (old-style).
        args = inspect.getargspec(direction)
        if len(args[0]) == 1:
            # Old migration, no ORM should be passed in
            return direction
        return (lambda: direction(orm))

    @staticmethod
    def record(migration, database):
        # Persist (Forwards) or remove (Backwards) the history row.
        raise NotImplementedError()

    def run_migration_error(self, migration, extra_info=''):
        return (
            ' ! Error found during real run of migration! Aborting.\n'
            '\n'
            ' ! Since you have a database that does not support running\n'
            ' ! schema-altering statements in transactions, we have had \n'
            ' ! to leave it in an interim state between migrations.\n'
            '%s\n'
            ' ! The South developers regret this has happened, and would\n'
            ' ! like to gently persuade you to consider a slightly\n'
            ' ! easier-to-deal-with DBMS (one that supports DDL transactions)\n'
            ' ! NOTE: The error which caused the migration to fail is further up.'
        ) % extra_info

    def run_migration(self, migration, database):
        """
        Execute one migration inside a transaction, recording it in the
        history on success and rolling back (with a loud message when DDL
        transactions are unsupported) on failure.
        """
        migration_function = self.direction(migration)
        south.db.db.start_transaction()
        try:
            migration_function()
            south.db.db.execute_deferred_sql()
            if not isinstance(getattr(self, '_wrapper', self), DryRunMigrator):
                # record us as having done this in the same transaction,
                # since we're not in a dry run
                self.record(migration, database)
        except:
            south.db.db.rollback_transaction()
            if not south.db.db.has_ddl_transactions:
                print(self.run_migration_error(migration))
            print("Error in migration: %s" % migration)
            raise
        else:
            try:
                south.db.db.commit_transaction()
            except:
                print("Error during commit in migration: %s" % migration)
                raise

    def run(self, migration, database):
        # Get the correct ORM.
        south.db.db.current_orm = self.orm(migration)
        # If we're not already in a dry run, and the database doesn't support
        # running DDL inside a transaction, *cough*MySQL*cough* then do a dry
        # run first.
        if not isinstance(getattr(self, '_wrapper', self), DryRunMigrator):
            if not south.db.db.has_ddl_transactions:
                dry_run = DryRunMigrator(migrator=self, ignore_fail=False)
                dry_run.run_migration(migration, database)
        return self.run_migration(migration, database)

    def send_ran_migration(self, migration, database):
        # Notify listeners that one migration really ran.
        ran_migration.send(None,
                           app=migration.app_label(),
                           migration=migration,
                           method=self.__class__.__name__.lower(),
                           verbosity=self.verbosity,
                           interactive=self.interactive,
                           db=database)

    def migrate(self, migration, database):
        """
        Runs the specified migration forwards/backwards, in order.
        """
        # NOTE(review): app and migration_name appear unused here.
        app = migration.migrations._migrations
        migration_name = migration.name()
        self.print_status(migration)
        result = self.run(migration, database)
        self.send_ran_migration(migration, database)
        return result

    def migrate_many(self, target, migrations, database):
        # Direction-specific batch runner; see Forwards/Backwards.
        raise NotImplementedError()
class MigratorWrapper(object):
    """
    Decorator-style base for migrators: keeps a copy of another migrator,
    grafts the subclass's overrides onto that copy, and delegates every
    other attribute to it.
    """

    def __init__(self, migrator, *args, **kwargs):
        # Work on a copy so the wrapped migrator itself is left untouched.
        self._migrator = copy(migrator)
        # Graft every non-dunder attribute defined on the wrapper class
        # (bound to *this* wrapper) onto the copied migrator, so methods
        # the migrator calls on itself hit our overrides instead.
        overrides = {}
        for attr in self.__class__.__dict__:
            if attr.startswith('__'):
                continue
            overrides[attr] = getattr(self, attr)
        self._migrator.__dict__.update(overrides)
        # Let the copy find its wrapper again (used for dry-run checks).
        self._migrator.__dict__['_wrapper'] = self

    def __getattr__(self, name):
        # Anything not defined on the wrapper is delegated to the copy.
        return getattr(self._migrator, name)
class DryRunMigrator(MigratorWrapper):
    """
    Wrapper that executes a migration inside a transaction which is always
    rolled back, to check whether it would work without touching the
    database for real.  With ignore_fail=True, failures are swallowed
    (used for pre-flight checks on databases without DDL transactions).
    """

    def __init__(self, ignore_fail=True, *args, **kwargs):
        super(DryRunMigrator, self).__init__(*args, **kwargs)
        self._ignore_fail = ignore_fail

    def _run_migration(self, migration):
        if migration.no_dry_run():
            if self.verbosity:
                print(" - Migration '%s' is marked for no-dry-run." % migration)
            return
        # Flip every configured database connection into dry-run mode.
        for name, db in iteritems(south.db.dbs):
            south.db.dbs[name].dry_run = True
        # preserve the constraint cache as it can be mutated by the dry run
        constraint_cache = deepcopy(south.db.db._constraint_cache)
        if self._ignore_fail:
            # Silence debug output while we only probe for failure.
            south.db.db.debug, old_debug = False, south.db.db.debug
        pending_creates = south.db.db.get_pending_creates()
        south.db.db.start_transaction()
        migration_function = self.direction(migration)
        try:
            try:
                migration_function()
                south.db.db.execute_deferred_sql()
            except:
                # Wrap whatever went wrong with the migration context.
                raise exceptions.FailedDryRun(migration, sys.exc_info())
        finally:
            # Always roll back and restore global state, pass or fail.
            south.db.db.rollback_transactions_dry_run()
            if self._ignore_fail:
                south.db.db.debug = old_debug
            south.db.db.clear_run_data(pending_creates)
            for name, db in iteritems(south.db.dbs):
                south.db.dbs[name].dry_run = False
        # restore the preserved constraint cache from before dry run was
        # executed
        south.db.db._constraint_cache = constraint_cache

    def run_migration(self, migration, database):
        # Returns False on a swallowed failure; re-raises otherwise.
        try:
            self._run_migration(migration)
        except exceptions.FailedDryRun:
            if self._ignore_fail:
                return False
            raise

    def send_ran_migration(self, *args, **kwargs):
        # A dry run never really ran anything - suppress the signal.
        pass
class FakeMigrator(MigratorWrapper):
    """
    Wrapper that pretends to run migrations: each one is recorded in the
    history as (un)applied without its code ever executing.
    """

    def run(self, migration, database):
        # Skip execution entirely; only write the history record.
        self.record(migration, database)
        if not self.verbosity:
            return
        print(' (faked)')

    def send_ran_migration(self, *args, **kwargs):
        # Nothing really ran, so don't announce that it did.
        pass
class LoadInitialDataMigrator(MigratorWrapper):
    """
    Wrapper that, after a successful run which ends at the app's final
    migration, loads the app's initial_data fixture.
    """

    def load_initial_data(self, target, db='default'):
        # Only load data when we ended exactly at the latest migration.
        if target is None or target != target.migrations[-1]:
            return
        # Load initial data, if we ended up at target
        if self.verbosity:
            print(" - Loading initial data for %s." % target.app_label())
        if DJANGO_VERSION < (1, 6):
            self.pre_1_6(target, db)
        else:
            self.post_1_6(target, db)

    def pre_1_6(self, target, db):
        # Override Django's get_apps call temporarily to only load from the
        # current app
        old_get_apps = models.get_apps
        new_get_apps = lambda: [models.get_app(target.app_label())]
        models.get_apps = new_get_apps
        loaddata.get_apps = new_get_apps
        try:
            call_command('loaddata', 'initial_data', verbosity=self.verbosity, database=db)
        finally:
            # Always undo the monkeypatch, even if loaddata blew up.
            models.get_apps = old_get_apps
            loaddata.get_apps = old_get_apps

    def post_1_6(self, target, db):
        import django.db.models.loading
        ## build a new 'AppCache' object with just the app we care about.
        old_cache = django.db.models.loading.cache
        new_cache = django.db.models.loading.AppCache()
        new_cache.get_apps = lambda: [new_cache.get_app(target.app_label())]
        ## monkeypatch
        django.db.models.loading.cache = new_cache
        try:
            call_command('loaddata', 'initial_data', verbosity=self.verbosity, database=db)
        finally:
            ## unmonkeypatch
            django.db.models.loading.cache = old_cache

    def migrate_many(self, target, migrations, database):
        # Run the wrapped migrator's own migrate_many (unbound call keeps
        # the wrapper's grafted methods in play), then load fixtures.
        migrator = self._migrator
        result = migrator.__class__.migrate_many(migrator, target, migrations, database)
        if result:
            self.load_initial_data(target, db=database)
        return True
class Forwards(Migrator):
    """
    Runs the specified migration forwards, in order.
    """
    torun = 'forwards'

    @staticmethod
    def title(target):
        if target is not None:
            return " - Migrating forwards to %s." % target.name()
        else:
            assert False, "You cannot migrate forwards to zero."

    @staticmethod
    def status(migration):
        return ' > %s' % migration

    @staticmethod
    def orm(migration):
        # A forwards run uses the migration's own frozen ORM.
        return migration.orm()

    def forwards(self, migration):
        return self._wrap_direction(migration.forwards(), migration.orm())

    direction = forwards

    @staticmethod
    def record(migration, database):
        # Record us as having done this
        record = MigrationHistory.for_migration(migration, database)
        try:
            from django.utils.timezone import now
            record.applied = now()
        except ImportError:
            # django.utils.timezone unavailable (older Django): fall back
            # to a naive UTC timestamp.
            record.applied = datetime.datetime.utcnow()
        if database != DEFAULT_DB_ALIAS:
            record.save(using=database)
        else:
            # Django 1.1 and below always go down this branch.
            record.save()

    def format_backwards(self, migration):
        """
        Dry-run the backwards migration and capture what it prints (the
        SQL it would issue), for inclusion in the error message.
        """
        if migration.no_dry_run():
            return " (migration cannot be dry-run; cannot discover commands)"
        old_debug, old_dry_run = south.db.db.debug, south.db.db.dry_run
        south.db.db.debug = south.db.db.dry_run = True
        stdout = sys.stdout
        sys.stdout = StringIO()
        try:
            try:
                self.backwards(migration)()
                return sys.stdout.getvalue()
            except:
                raise
        finally:
            # Restore debug/dry-run flags and the real stdout.
            south.db.db.debug, south.db.db.dry_run = old_debug, old_dry_run
            sys.stdout = stdout

    def run_migration_error(self, migration, extra_info=''):
        # Prepend a possible-recovery hint (the backwards SQL) to the
        # generic error text from Migrator.
        extra_info = ('\n'
                      '! You *might* be able to recover with:'
                      '%s'
                      '%s' %
                      (self.format_backwards(migration), extra_info))
        return super(Forwards, self).run_migration_error(migration, extra_info)

    def migrate_many(self, target, migrations, database):
        try:
            for migration in migrations:
                result = self.migrate(migration, database)
                if result is False: # The migrations errored, but nicely.
                    return False
        finally:
            # Call any pending post_syncdb signals
            south.db.db.send_pending_create_signals(verbosity=self.verbosity,
                                                    interactive=self.interactive)
        return True
class Backwards(Migrator):
    """
    Runs the specified migration backwards, in order.
    """
    torun = 'backwards'

    @staticmethod
    def title(target):
        if target is None:
            return " - Migrating backwards to zero state."
        else:
            return " - Migrating backwards to just after %s." % target.name()

    @staticmethod
    def status(migration):
        return ' < %s' % migration

    @staticmethod
    def orm(migration):
        # A backwards run needs the ORM state from before this migration.
        return migration.prev_orm()

    direction = Migrator.backwards

    @staticmethod
    def record(migration, database):
        # Record us as having not done this
        record = MigrationHistory.for_migration(migration, database)
        if record.id is not None:
            if database != DEFAULT_DB_ALIAS:
                record.delete(using=database)
            else:
                # Django 1.1 always goes down here
                record.delete()

    def migrate_many(self, target, migrations, database):
        for migration in migrations:
            self.migrate(migration, database)
        return True

94
south/migration/utils.py Normal file
View File

@ -0,0 +1,94 @@
import sys
from collections import deque
from django.utils.datastructures import SortedDict
from django.db import models
from south import exceptions
class SortedSet(SortedDict):
    """
    Insertion-ordered set built on SortedDict: the keys are the members
    and the values are just a dummy True.
    """

    def __init__(self, data=tuple()):
        self.extend(data)

    def __str__(self):
        return "SortedSet(%s)" % list(self)

    def add(self, value):
        # Membership == presence of the key; the value is irrelevant.
        self[value] = True

    def remove(self, value):
        del self[value]

    def extend(self, iterable):
        # Add in iteration order, keeping each member's first position.
        for member in iterable:
            self.add(member)
def get_app_label(app):
    """
    Return the _internal_ app label for the given *models* module.

    e.g. for <module django.contrib.auth.models> this returns 'auth' -
    the second-to-last dotted component of the module name.
    """
    name_parts = app.__name__.rsplit('.', 2)
    return name_parts[-2]
def app_label_to_app_module(app_label):
    """
    Given the app label, return the module of the app itself (unlike
    models.get_app, which returns the app's *models* module).
    """
    # get_app hands back the models module; drop the last dotted
    # component to get the package that contains it.
    models_module = models.get_app(app_label)
    package_name = ".".join(models_module.__name__.split(".")[:-1])
    if package_name not in sys.modules:
        # Not imported yet - pull it in (non-empty fromlist makes
        # __import__ load the full dotted submodule).
        __import__(package_name, {}, {}, [''])
    return sys.modules[package_name]
def flatten(*stack):
    """
    Lazily yield the leaves of arbitrarily nested iterables, left to
    right, depth first.

    Each positional argument must itself be iterable.  Strings count as
    leaves (they are never descended into), as does any item without an
    ``__iter__`` attribute.
    """
    for iterable in stack:
        for item in iterable:
            if hasattr(item, '__iter__') and not isinstance(item, str):
                # Nested iterable: recurse into it before moving on.
                for leaf in flatten(item):
                    yield leaf
            else:
                yield item
# Memoizes _dfs results per (node, edge-function) pair, across calls.
dependency_cache = {}

def _dfs(start, get_children, path):
    """
    Depth-first walk from `start` along get_children edges, returning the
    ordered, de-duplicated node list with dependencies before dependents.
    `path` is the current recursion stack, used for cycle detection.
    """
    if (start, get_children) in dependency_cache:
        return dependency_cache[(start, get_children)]
    results = []
    if start in path:
        # Revisiting a node already on the stack means a cycle.
        raise exceptions.CircularDependency(path[path.index(start):] + [start])
    path.append(start)
    results.append(start)
    # Sort children by string form so the plan order is deterministic.
    children = sorted(get_children(start), key=lambda x: str(x))
    # We need to apply all the migrations this one depends on
    for n in children:
        # Prepend each child's subtree so prerequisites come first.
        results = _dfs(n, get_children, path) + results
    path.pop()
    # Drop duplicates while keeping each node's first (earliest) position.
    results = list(SortedSet(results))
    dependency_cache[(start, get_children)] = results
    return results
def dfs(start, get_children):
    """
    Depth-first dependency walk from ``start``: returns the ordered,
    de-duplicated node list, starting each traversal with a fresh path.
    """
    fresh_path = []
    return _dfs(start, get_children, fresh_path)
def depends(start, get_children):
    """Alias for dfs(): compute the ordered dependency plan for start."""
    plan = dfs(start, get_children)
    return plan

43
south/models.py Normal file
View File

@ -0,0 +1,43 @@
from django.db import models
from south.db import DEFAULT_DB_ALIAS
# If we detect Django 1.7 or higher, then exit
# Placed here so it's guaranteed to be imported on Django start
# (south.models is loaded early, so this aborts before South can
# interfere with native Django migrations).
import django
if django.VERSION[0] > 1 or (django.VERSION[0] == 1 and django.VERSION[1] > 6):
    raise RuntimeError("South does not support Django 1.7 or higher. Please use native Django migrations.")
class MigrationHistory(models.Model):
    """
    Database record of a single migration.  A row with `applied` set
    marks the (app_name, migration) pair as having been applied.
    """
    app_name = models.CharField(max_length=255)
    migration = models.CharField(max_length=255)
    applied = models.DateTimeField(blank=True)

    @classmethod
    def for_migration(cls, migration, database):
        """
        Return the history row for `migration` on `database`, or an
        unsaved new row if none exists yet.
        """
        try:
            # Switch on multi-db-ness
            if database != DEFAULT_DB_ALIAS:
                # Django 1.2
                objects = cls.objects.using(database)
            else:
                # Django <= 1.1
                objects = cls.objects
            return objects.get(
                app_name=migration.app_label(),
                migration=migration.name(),
            )
        except cls.DoesNotExist:
            return cls(
                app_name=migration.app_label(),
                migration=migration.name(),
            )

    def get_migrations(self):
        # Imported here to avoid a circular import with south.migration.
        from south.migration.base import Migrations
        return Migrations(self.app_name)

    def get_migration(self):
        # The Migration object this row refers to.
        return self.get_migrations().migration(self.migration)

    def __str__(self):
        return "<%s: %s>" % (self.app_name, self.migration)

464
south/modelsinspector.py Normal file
View File

@ -0,0 +1,464 @@
"""
Like the old south.modelsparser, but using introspection where possible
rather than direct inspection of models.py.
"""
from __future__ import print_function
import datetime
import re
import decimal
from south.utils import get_attribute, auto_through
from south.utils.py3 import text_type
from django.db import models
from django.db.models.base import ModelBase, Model
from django.db.models.fields import NOT_PROVIDED
from django.conf import settings
from django.utils.functional import Promise
from django.contrib.contenttypes import generic
from django.utils.datastructures import SortedDict
from django.utils import datetime_safe
NOISY = False
try:
from django.utils import timezone
except ImportError:
timezone = False
# Define any converter functions first to prevent NameErrors
def convert_on_delete_handler(value):
django_db_models_module = 'models' # relative to standard import 'django.db'
if hasattr(models, "PROTECT"):
if value in (models.CASCADE, models.PROTECT, models.DO_NOTHING, models.SET_DEFAULT):
# straightforward functions
return '%s.%s' % (django_db_models_module, value.__name__)
else:
# This is totally dependent on the implementation of django.db.models.deletion.SET
func_name = getattr(value, '__name__', None)
if func_name == 'set_on_delete':
# we must inspect the function closure to see what parameters were passed in
closure_contents = value.__closure__[0].cell_contents
if closure_contents is None:
return "%s.SET_NULL" % (django_db_models_module)
# simple function we can perhaps cope with:
elif hasattr(closure_contents, '__call__'):
raise ValueError("South does not support on_delete with SET(function) as values.")
else:
# Attempt to serialise the value
return "%s.SET(%s)" % (django_db_models_module, value_clean(closure_contents))
raise ValueError("%s was not recognized as a valid model deletion handler. Possible values: %s." % (value, ', '.join(f.__name__ for f in (models.CASCADE, models.PROTECT, models.SET, models.SET_NULL, models.SET_DEFAULT, models.DO_NOTHING))))
else:
raise ValueError("on_delete argument encountered in Django version that does not support it")
# Gives information about how to introspect certain fields.
# This is a list of triples; the first item is a list of fields it applies to,
# (note that isinstance is used, so superclasses are perfectly valid here)
# the second is a list of positional argument descriptors, and the third
# is a list of keyword argument descriptors.
# Descriptors are of the form:
#   [attrname, options]
# Where attrname is the attribute on the field to get the value from, and options
# is an optional dict.
#
# The introspector uses the combination of all matching entries, in order.
# (See get_value() below for the full set of recognised option keys.)
introspection_details = [
    (
        (models.Field, ),
        [],
        {
            "null": ["null", {"default": False}],
            "blank": ["blank", {"default": False, "ignore_if":"primary_key"}],
            "primary_key": ["primary_key", {"default": False}],
            "max_length": ["max_length", {"default": None}],
            "unique": ["_unique", {"default": False}],
            "db_index": ["db_index", {"default": False}],
            "default": ["default", {"default": NOT_PROVIDED, "ignore_dynamics": True}],
            "db_column": ["db_column", {"default": None}],
            "db_tablespace": ["db_tablespace", {"default": settings.DEFAULT_INDEX_TABLESPACE}],
        },
    ),
    (
        (models.ForeignKey, models.OneToOneField),
        [],
        dict([
            ("to", ["rel.to", {}]),
            ("to_field", ["rel.field_name", {"default_attr": "rel.to._meta.pk.name"}]),
            ("related_name", ["rel.related_name", {"default": None}]),
            ("db_index", ["db_index", {"default": True}]),
            # on_delete only exists on newer Django, hence ignore_missing;
            # values are serialised through convert_on_delete_handler above.
            ("on_delete", ["rel.on_delete", {"default": getattr(models, "CASCADE", None), "is_django_function": True, "converter": convert_on_delete_handler, "ignore_missing": True}])
        ])
    ),
    (
        (models.ManyToManyField,),
        [],
        {
            "to": ["rel.to", {}],
            "symmetrical": ["rel.symmetrical", {"default": True}],
            "related_name": ["rel.related_name", {"default": None}],
            "db_table": ["db_table", {"default": None}],
            # TODO: Kind of ugly to add this one-time-only option
            "through": ["rel.through", {"ignore_if_auto_through": True}],
        },
    ),
    (
        (models.DateField, models.TimeField),
        [],
        {
            "auto_now": ["auto_now", {"default": False}],
            "auto_now_add": ["auto_now_add", {"default": False}],
        },
    ),
    (
        (models.DecimalField, ),
        [],
        {
            "max_digits": ["max_digits", {"default": None}],
            "decimal_places": ["decimal_places", {"default": None}],
        },
    ),
    (
        (models.SlugField, ),
        [],
        {
            "db_index": ["db_index", {"default": True}],
        },
    ),
    (
        (models.BooleanField, ),
        [],
        {
            "default": ["default", {"default": NOT_PROVIDED, "converter": bool}],
            "blank": ["blank", {"default": True, "ignore_if":"primary_key"}],
        },
    ),
    (
        (models.FilePathField, ),
        [],
        {
            "path": ["path", {"default": ''}],
            "match": ["match", {"default": None}],
            "recursive": ["recursive", {"default": False}],
        },
    ),
    (
        (generic.GenericRelation, ),
        [],
        {
            "to": ["rel.to", {}],
            "symmetrical": ["rel.symmetrical", {"default": True}],
            "object_id_field": ["object_id_field_name", {"default": "object_id"}],
            "content_type_field": ["content_type_field_name", {"default": "content_type"}],
            "blank": ["blank", {"default": True}],
        },
    ),
]
# Regexes of allowed field full paths.
# Raw strings: "\." in a non-raw literal is an invalid escape sequence
# (DeprecationWarning historically, SyntaxWarning/SyntaxError in newer
# Pythons). The matched text is unchanged.
allowed_fields = [
    r"^django\.db",
    r"^django\.contrib\.contenttypes\.generic",
    r"^django\.contrib\.localflavor",
    r"^django_localflavor_\w\w",
]
# Regexes of ignored fields (custom fields which look like fields, but have no column behind them).
# Raw strings: "\." in a non-raw literal is an invalid escape sequence
# (DeprecationWarning historically, SyntaxWarning/SyntaxError in newer Pythons).
ignored_fields = [
    r"^django\.contrib\.contenttypes\.generic\.GenericRelation",
    r"^django\.contrib\.contenttypes\.generic\.GenericForeignKey",
]
# Similar, but for Meta, so just the inner level (kwds).
# ignore_missing: treat an absent attribute as the default (the option is
# missing on older Django versions) — see get_value() below.
meta_details = {
    "db_table": ["db_table", {"default_attr_concat": ["%s_%s", "app_label", "module_name"]}],
    "db_tablespace": ["db_tablespace", {"default": settings.DEFAULT_TABLESPACE}],
    "unique_together": ["unique_together", {"default": []}],
    "index_together": ["index_together", {"default": [], "ignore_missing": True}],
    "ordering": ["ordering", {"default": []}],
    "proxy": ["proxy", {"default": False, "ignore_missing": True}],
}
def add_introspection_rules(rules=(), patterns=()):
    """
    Allows you to add some introspection rules at runtime, e.g. for 3rd party apps.

    rules: list/tuple of (classes, arg_descriptors, kwarg_descriptors) triples,
        appended to introspection_details.
    patterns: list/tuple of regex strings appended to allowed_fields.
    """
    # Immutable tuple defaults replace the mutable-default-argument
    # anti-pattern; () still passes the isinstance checks, so the
    # interface is unchanged for all callers.
    assert isinstance(rules, (list, tuple))
    assert isinstance(patterns, (list, tuple))
    allowed_fields.extend(patterns)
    introspection_details.extend(rules)
def add_ignored_fields(patterns):
    """Register extra regex patterns for field classes South should skip entirely."""
    assert isinstance(patterns, (list, tuple))
    for pattern in patterns:
        ignored_fields.append(pattern)
def can_ignore(field):
    """
    Returns True if we know for certain that we can ignore this field, False
    otherwise (i.e. its dotted class path matches one of ignored_fields).
    """
    dotted_path = "%s.%s" % (field.__class__.__module__, field.__class__.__name__)
    return any(re.match(pattern, dotted_path) for pattern in ignored_fields)
def can_introspect(field):
    """
    Returns True if we are allowed to introspect this field, False otherwise.
    ('allowed' means 'in core'. Custom fields can declare they are introspectable
    by the default South rules by adding the attribute _south_introspects = True.)
    """
    # Fields may opt in explicitly via the marker attribute.
    if getattr(field, "_south_introspects", False):
        return True
    # Otherwise the field's dotted class path must match a whitelisted regex.
    dotted_path = "%s.%s" % (field.__class__.__module__, field.__class__.__name__)
    return any(re.match(pattern, dotted_path) for pattern in allowed_fields)
def matching_details(field):
    """
    Returns the union of all matching entries in introspection_details for the field.

    Entries are combined in declaration order, so for duplicate keyword names
    the later entry's descriptor wins.
    """
    our_args = []
    our_kwargs = {}
    for classes, args, kwargs in introspection_details:
        # Idiom fix: generator with any() instead of building a throwaway list.
        if any(isinstance(field, klass) for klass in classes):
            our_args.extend(args)
            our_kwargs.update(kwargs)
    return our_args, our_kwargs
class IsDefault(Exception):
    """
    Raised by get_value() when a field attribute holds its default value,
    signalling that it should be omitted from the frozen definition.
    """
def get_value(field, descriptor):
    """
    Gets an attribute value from a Field instance and formats it.

    descriptor is an [attrname, options] pair (see introspection_details).
    Raises IsDefault when the value should be omitted from the frozen
    definition; the checks below run in a fixed order and each may bail out.
    """
    attrname, options = descriptor
    # If the options say it's not a attribute name but a real value, use that.
    if options.get('is_value', False):
        value = attrname
    else:
        try:
            value = get_attribute(field, attrname)
        except AttributeError:
            # ignore_missing: older Django simply lacks the attribute.
            if options.get("ignore_missing", False):
                raise IsDefault
            else:
                raise
    # Lazy-eval functions get eval'd.
    if isinstance(value, Promise):
        value = text_type(value)
    # If the value is the same as the default, omit it for clarity
    if "default" in options and value == options['default']:
        raise IsDefault
    # If there's an ignore_if, use it
    if "ignore_if" in options:
        if get_attribute(field, options['ignore_if']):
            raise IsDefault
    # If there's an ignore_if_auto_through which is True, use it
    if options.get("ignore_if_auto_through", False):
        if auto_through(field):
            raise IsDefault
    # Some default values need to be gotten from an attribute too.
    if "default_attr" in options:
        default_value = get_attribute(field, options['default_attr'])
        if value == default_value:
            raise IsDefault
    # Some are made from a formatting string and several attrs (e.g. db_table)
    if "default_attr_concat" in options:
        format, attrs = options['default_attr_concat'][0], options['default_attr_concat'][1:]
        default_value = format % tuple(map(lambda x: get_attribute(field, x), attrs))
        if value == default_value:
            raise IsDefault
    # Clean and return the value
    return value_clean(value, options)
def value_clean(value, options={}):
    """
    Takes a value and cleans it up (so e.g. it has timezone working right),
    returning the source-string form used in frozen definitions.

    NOTE(review): options is a mutable default argument, but it is only ever
    read here, never mutated, so the usual shared-state hazard does not apply.
    """
    # Lazy-eval functions get eval'd.
    if isinstance(value, Promise):
        value = text_type(value)
    # Callables get called.
    if not options.get('is_django_function', False) and callable(value) and not isinstance(value, ModelBase):
        # Datetime.datetime.now is special, as we can access it from the eval
        # context (and because it changes all the time; people will file bugs otherwise).
        if value == datetime.datetime.now:
            return "datetime.datetime.now"
        elif value == datetime.datetime.utcnow:
            return "datetime.datetime.utcnow"
        elif value == datetime.date.today:
            return "datetime.date.today"
        # In case we use Django's own now function, revert to datetime's
        # original one since we'll deal with timezones on our own.
        elif timezone and value == timezone.now:
            return "datetime.datetime.now"
        # All other callables get called.
        value = value()
    # Models get their own special repr()
    if isinstance(value, ModelBase):
        # If it's a proxy model, follow it back to its non-proxy parent
        if getattr(value._meta, "proxy", False):
            value = value._meta.proxy_for_model
        return "orm['%s.%s']" % (value._meta.app_label, value._meta.object_name)
    # As do model instances
    if isinstance(value, Model):
        # ignore_dynamics: instance-valued defaults are dropped entirely.
        if options.get("ignore_dynamics", False):
            raise IsDefault
        return "orm['%s.%s'].objects.get(pk=%r)" % (value.__class__._meta.app_label, value.__class__._meta.object_name, value.pk)
    # Make sure Decimal is converted down into a string
    if isinstance(value, decimal.Decimal):
        value = str(value)
    # in case the value is timezone aware
    datetime_types = (
        datetime.datetime,
        datetime.time,
        datetime_safe.datetime,
    )
    if (timezone and isinstance(value, datetime_types) and
            getattr(settings, 'USE_TZ', False) and
            value is not None and timezone.is_aware(value)):
        # Strip tzinfo so the frozen repr is a naive datetime.
        default_timezone = timezone.get_default_timezone()
        value = timezone.make_naive(value, default_timezone)
    # datetime_safe has an improper repr value
    if isinstance(value, datetime_safe.datetime):
        value = datetime.datetime(*value.utctimetuple()[:7])
    # converting a date value to a datetime to be able to handle
    # timezones later gracefully
    elif isinstance(value, (datetime.date, datetime_safe.date)):
        value = datetime.datetime(*value.timetuple()[:3])
    # Now, apply the converter func if there is one
    if "converter" in options:
        value = options['converter'](value)
    # Return the final value
    if options.get('is_django_function', False):
        # Already a source string (e.g. from convert_on_delete_handler).
        return value
    else:
        return repr(value)
def introspector(field):
    """
    Given a field, introspects its definition triple: returns (args, kwargs)
    for every descriptor matching the field, skipping values at their default.
    """
    arg_defs, kwarg_defs = matching_details(field)
    args = []
    for descriptor in arg_defs:
        try:
            args.append(get_value(field, descriptor))
        except IsDefault:
            continue
    kwargs = {}
    for keyword, descriptor in kwarg_defs.items():
        try:
            kwargs[keyword] = get_value(field, descriptor)
        except IsDefault:
            continue
    return args, kwargs
def get_model_fields(model, m2m=False):
    """
    Given a model class, returns a dict of {field_name: field_triple} defs.

    When m2m is True, local many-to-many fields are included too. A value of
    None means the field could not be introspected at all.
    """
    field_defs = SortedDict()
    # NOTE(review): inherited_fields is collected below but never used
    # afterwards in this function.
    inherited_fields = {}
    # Go through all bases (that are themselves models, but not Model)
    for base in model.__bases__:
        if hasattr(base, '_meta') and issubclass(base, models.Model):
            if not base._meta.abstract:
                # Looks like we need their fields, Ma.
                inherited_fields.update(get_model_fields(base))
    # Now, go through all the fields and try to get their definition
    source = model._meta.local_fields[:]
    if m2m:
        source += model._meta.local_many_to_many
    for field in source:
        # Can we ignore it completely?
        if can_ignore(field):
            continue
        # Does it define a south_field_triple method?
        if hasattr(field, "south_field_triple"):
            if NOISY:
                print(" ( Nativing field: %s" % field.name)
            field_defs[field.name] = field.south_field_triple()
        # Can we introspect it?
        elif can_introspect(field):
            # Get the full field class path.
            field_class = field.__class__.__module__ + "." + field.__class__.__name__
            # Run this field through the introspector
            args, kwargs = introspector(field)
            # Workaround for Django bug #13987
            if model._meta.pk.column == field.column and 'primary_key' not in kwargs:
                kwargs['primary_key'] = True
            # That's our definition!
            field_defs[field.name] = (field_class, args, kwargs)
        # Shucks, no definition!
        else:
            if NOISY:
                print(" ( Nodefing field: %s" % field.name)
            field_defs[field.name] = None
    # If they've used the horrific hack that is order_with_respect_to, deal with
    # it.
    if model._meta.order_with_respect_to:
        field_defs['_order'] = ("django.db.models.fields.IntegerField", [], {"default": "0"})
    return field_defs
def get_model_meta(model):
    """
    Given a model class, will return the dict representing the Meta class.
    """
    # Introspect the declared Meta options, skipping anything at its default.
    meta_def = {}
    for keyword, descriptor in meta_details.items():
        try:
            meta_def[keyword] = get_value(model._meta, descriptor)
        except IsDefault:
            continue
    # Also, add on any non-abstract model base classes.
    # This is called _ormbases as the _bases variable was previously used
    # for a list of full class paths to bases, so we can't conflict.
    for base in model.__bases__:
        is_concrete_model_base = (
            hasattr(base, '_meta')
            and issubclass(base, models.Model)
            and not base._meta.abstract
        )
        if is_concrete_model_base:
            # OK, that matches our terms.
            label = "%s.%s" % (base._meta.app_label, base._meta.object_name)
            meta_def.setdefault('_ormbases', []).append(label)
    return meta_def
# Now, load the built-in South introspection plugins
import south.introspection_plugins

407
south/orm.py Normal file
View File

@ -0,0 +1,407 @@
"""
South's fake ORM; lets you not have to write SQL inside migrations.
Roughly emulates the real Django ORM, to a point.
"""
from __future__ import print_function
import inspect
from django.db import models
from django.db.models.loading import cache
from django.core.exceptions import ImproperlyConfigured
from south.db import db
from south.utils import ask_for_it_by_name, datetime_utils
from south.hacks import hacks
from south.exceptions import UnfreezeMeLater, ORMBaseNotIncluded, ImpossibleORMUnfreeze
from south.utils.py3 import string_types
class ModelsLocals(object):
    """
    Dictionary-like stand-in for locals(); a lookup that misses is retried
    with the lowercased key, because model names are stored lowercase.
    """

    def __init__(self, data):
        self.data = data

    def __getitem__(self, key):
        if key in self.data:
            return self.data[key]
        # Fall back to the lowercase form (raises KeyError if that misses too).
        return self.data[key.lower()]
# Stores already-created ORMs, keyed by the constructor arguments.
_orm_cache = {}

def FakeORM(*args):
    """
    Creates a Fake Django ORM.
    This is actually a memoised constructor; the real class is _FakeORM.
    """
    # Idiom fix: `args not in` replaces the awkward `not args in`.
    if args not in _orm_cache:
        _orm_cache[args] = _FakeORM(*args)
    return _orm_cache[args]
class LazyFakeORM(object):
    """
    In addition to memoising the ORM call, this descriptor lazily generates
    ORMs for a Migration class. Assign the result of this to (for example)
    .orm, and as soon as .orm is accessed the ORM will be created.
    """

    def __init__(self, *args):
        # Arguments forwarded to FakeORM on first access.
        self._args = args
        self.orm = None

    def __get__(self, obj, type=None):
        # Build the FakeORM once, then hand back the cached instance.
        if self.orm is None:
            self.orm = FakeORM(*self._args)
        return self.orm
class _FakeORM(object):
    """
    Simulates the Django ORM at some point in time,
    using a frozen definition on the Migration class.
    """

    def __init__(self, cls, app):
        # cls is the Migration class carrying the frozen `models` dict;
        # app is the default app label for unqualified model names.
        self.default_app = app
        self.cls = cls
        # Try loading the models off the migration class; default to no models.
        self.models = {}
        try:
            self.models_source = cls.models
        except AttributeError:
            return
        # Start a 'new' AppCache
        hacks.clear_app_cache()
        # Now, make each model's data into a FakeModel
        # We first make entries for each model that are just its name
        # This allows us to have circular model dependency loops
        model_names = []
        for name, data in self.models_source.items():
            # Make sure there's some kind of Meta
            if "Meta" not in data:
                data['Meta'] = {}
            try:
                app_label, model_name = name.split(".", 1)
            except ValueError:
                # No dot: fall back to the migration's own app.
                app_label = self.default_app
                model_name = name
            # If there's an object_name in the Meta, use it and remove it
            if "object_name" in data['Meta']:
                model_name = data['Meta']['object_name']
                del data['Meta']['object_name']
            name = "%s.%s" % (app_label, model_name)
            self.models[name.lower()] = name
            model_names.append((name.lower(), app_label, model_name, data))
        # Loop until model_names is empty, or hasn't shrunk in size since
        # last iteration.
        # The make_model method can ask to postpone a model; it's then pushed
        # to the back of the queue. Because this is currently only used for
        # inheritance, it should thus theoretically always decrease by one.
        last_size = None
        while model_names:
            # First, make sure we've shrunk.
            if len(model_names) == last_size:
                raise ImpossibleORMUnfreeze()
            last_size = len(model_names)
            # Make one run through
            postponed_model_names = []
            for name, app_label, model_name, data in model_names:
                try:
                    self.models[name] = self.make_model(app_label, model_name, data)
                except UnfreezeMeLater:
                    postponed_model_names.append((name, app_label, model_name, data))
            # Reset
            model_names = postponed_model_names
        # And perform the second run to iron out any circular/backwards depends.
        self.retry_failed_fields()
        # Force evaluation of relations on the models now
        for model in self.models.values():
            model._meta.get_all_field_names()
        # Reset AppCache
        hacks.unclear_app_cache()

    def __iter__(self):
        """Iterate over the unfrozen model classes."""
        return iter(self.models.values())

    def __getattr__(self, key):
        """Attribute access resolves a model name within the default app."""
        fullname = (self.default_app+"."+key).lower()
        try:
            return self.models[fullname]
        except KeyError:
            raise AttributeError("The model '%s' from the app '%s' is not available in this migration. (Did you use orm.ModelName, not orm['app.ModelName']?)" % (key, self.default_app))

    def __getitem__(self, key):
        """
        Item access resolves 'app.ModelName' (case-insensitive); an optional
        ':fieldname' suffix returns that field instead of the model.
        """
        # Detect if they asked for a field on a model or not.
        if ":" in key:
            key, fname = key.split(":")
        else:
            fname = None
        # Now, try getting the model
        key = key.lower()
        try:
            model = self.models[key]
        except KeyError:
            try:
                app, model = key.split(".", 1)
            except ValueError:
                raise KeyError("The model '%s' is not in appname.modelname format." % key)
            else:
                raise KeyError("The model '%s' from the app '%s' is not available in this migration." % (model, app))
        # If they asked for a field, get it.
        if fname:
            return model._meta.get_field_by_name(fname)[0]
        else:
            return model

    def eval_in_context(self, code, app, extra_imports={}):
        "Evaluates the given code in the context of the migration file."
        # Drag in the migration module's locals (hopefully including models.py)
        # excluding all models from that (i.e. from modern models.py), to stop pollution
        fake_locals = dict(
            (key, value)
            for key, value in inspect.getmodule(self.cls).__dict__.items()
            if not (
                isinstance(value, type)
                and issubclass(value, models.Model)
                and hasattr(value, "_meta")
            )
        )
        # We add our models into the locals for the eval
        fake_locals.update(dict([
            (name.split(".")[-1], model)
            for name, model in self.models.items()
        ]))
        # Make sure the ones for this app override.
        fake_locals.update(dict([
            (name.split(".")[-1], model)
            for name, model in self.models.items()
            if name.split(".")[0] == app
        ]))
        # Ourselves as orm, to allow non-fail cross-app referencing
        fake_locals['orm'] = self
        # And a fake _ function
        fake_locals['_'] = lambda x: x
        # Datetime; there should be no datetime direct accesses
        fake_locals['datetime'] = datetime_utils
        # Now, go through the requested imports and import them.
        for name, value in extra_imports.items():
            # First, try getting it out of locals.
            parts = value.split(".")
            try:
                obj = fake_locals[parts[0]]
                for part in parts[1:]:
                    obj = getattr(obj, part)
            except (KeyError, AttributeError):
                pass
            else:
                fake_locals[name] = obj
                continue
            # OK, try to import it directly
            try:
                fake_locals[name] = ask_for_it_by_name(value)
            except ImportError:
                if name == "SouthFieldClass":
                    # The field class itself is mandatory; anything else is best-effort.
                    raise ValueError("Cannot import the required field '%s'" % value)
                else:
                    print("WARNING: Cannot import '%s'" % value)
        # Use ModelsLocals to make lookups work right for CapitalisedModels
        fake_locals = ModelsLocals(fake_locals)
        return eval(code, globals(), fake_locals)

    def make_meta(self, app, model, data, stub=False):
        "Makes a Meta class out of a dict of eval-able arguments."
        results = {'app_label': app}
        for key, code in data.items():
            # Some things we never want to use.
            if key in ["_bases", "_ormbases"]:
                continue
            # Some things we don't want with stubs.
            if stub and key in ["order_with_respect_to"]:
                continue
            # OK, add it.
            try:
                results[key] = self.eval_in_context(code, app)
            except (NameError, AttributeError) as e:
                raise ValueError("Cannot successfully create meta field '%s' for model '%s.%s': %s." % (
                    key, app, model, e
                ))
        return type("Meta", tuple(), results)

    def make_model(self, app, name, data):
        "Makes a Model class out of the given app name, model name and pickled data."
        # Extract any bases out of Meta
        if "_ormbases" in data['Meta']:
            # Make sure everything we depend on is done already; otherwise, wait.
            for key in data['Meta']['_ormbases']:
                key = key.lower()
                if key not in self.models:
                    raise ORMBaseNotIncluded("Cannot find ORM base %s" % key)
                elif isinstance(self.models[key], string_types):
                    # Then the other model hasn't been unfrozen yet.
                    # We postpone ourselves; the situation will eventually resolve.
                    raise UnfreezeMeLater()
            bases = [self.models[key.lower()] for key in data['Meta']['_ormbases']]
        # Perhaps the old style?
        elif "_bases" in data['Meta']:
            bases = map(ask_for_it_by_name, data['Meta']['_bases'])
        # Ah, bog standard, then.
        else:
            bases = [models.Model]
        # Turn the Meta dict into a basic class
        meta = self.make_meta(app, name, data['Meta'], data.get("_stub", False))
        failed_fields = {}
        fields = {}
        stub = False
        # Now, make some fields!
        for fname, params in data.items():
            # If it's the stub marker, ignore it.
            if fname == "_stub":
                stub = bool(params)
                continue
            elif fname == "Meta":
                continue
            elif not params:
                raise ValueError("Field '%s' on model '%s.%s' has no definition." % (fname, app, name))
            elif isinstance(params, string_types):
                # It's a premade definition string! Let's hope it works...
                code = params
                extra_imports = {}
            else:
                # If there's only one parameter (backwards compat), make it 3.
                if len(params) == 1:
                    params = (params[0], [], {})
                # There should be 3 parameters. Code is a tuple of (code, what-to-import)
                if len(params) == 3:
                    code = "SouthFieldClass(%s)" % ", ".join(
                        params[1] +
                        ["%s=%s" % (n, v) for n, v in params[2].items()]
                    )
                    extra_imports = {"SouthFieldClass": params[0]}
                else:
                    raise ValueError("Field '%s' on model '%s.%s' has a weird definition length (should be 1 or 3 items)." % (fname, app, name))
            try:
                # Execute it in a probably-correct context.
                field = self.eval_in_context(code, app, extra_imports)
            except (NameError, AttributeError, AssertionError, KeyError):
                # It might rely on other models being around. Add it to the
                # model for the second pass.
                failed_fields[fname] = (code, extra_imports)
            else:
                fields[fname] = field
        # Find the app in the Django core, and get its module
        more_kwds = {}
        try:
            app_module = models.get_app(app)
            more_kwds['__module__'] = app_module.__name__
        except ImproperlyConfigured:
            # The app this belonged to has vanished, but thankfully we can still
            # make a mock model, so ignore the error.
            more_kwds['__module__'] = '_south_mock'
        more_kwds['Meta'] = meta
        # Make our model
        fields.update(more_kwds)
        model = type(
            str(name),
            tuple(bases),
            fields,
        )
        # If this is a stub model, change Objects to a whiny class
        if stub:
            model.objects = WhinyManager()
            # Also, make sure they can't instantiate it
            model.__init__ = whiny_method
        else:
            model.objects = NoDryRunManager(model.objects)
        if failed_fields:
            model._failed_fields = failed_fields
        return model

    def retry_failed_fields(self):
        "Tries to re-evaluate the _failed_fields for each model."
        for modelkey, model in self.models.items():
            app, modelname = modelkey.split(".", 1)
            if hasattr(model, "_failed_fields"):
                for fname, (code, extra_imports) in model._failed_fields.items():
                    try:
                        field = self.eval_in_context(code, app, extra_imports)
                    except (NameError, AttributeError, AssertionError, KeyError) as e:
                        # It's failed again. Complain.
                        raise ValueError("Cannot successfully create field '%s' for model '%s': %s." % (
                            fname, modelname, e
                        ))
                    else:
                        # Startup that field.
                        model.add_to_class(fname, field)
class WhinyManager(object):
    """
    A fake manager that whines whenever you try to touch it. For stub models.
    """

    def __getattr__(self, attr_name):
        # Any attribute access at all is an error on a stub model.
        raise AttributeError("You cannot use items from a stub model.")
class NoDryRunManager(object):
    """
    A manager that always proxies through to the real manager,
    unless a dry run is in progress.
    """

    def __init__(self, real):
        # The real manager being wrapped.
        self.real = real

    def __getattr__(self, name):
        if not db.dry_run:
            return getattr(self.real, name)
        raise AttributeError("You are in a dry run, and cannot access the ORM.\nWrap ORM sections in 'if not db.dry_run:', or if the whole migration is only a data migration, set no_dry_run = True on the Migration class.")
def whiny_method(*args, **kwargs):
    """Replacement __init__ for stub models: instantiating one is always an error."""
    raise ValueError("You cannot instantiate a stub model.")

24
south/signals.py Normal file
View File

@ -0,0 +1,24 @@
"""
South-specific signals
"""
from django.dispatch import Signal
from django.conf import settings
# Sent at the start of the migration of an app
pre_migrate = Signal(providing_args=["app", "verbosity", "interactive", "db"])

# Sent after each successful migration of an app
post_migrate = Signal(providing_args=["app", "verbosity", "interactive", "db"])

# Sent after each run of a particular migration in a direction
ran_migration = Signal(providing_args=["app", "migration", "method", "verbosity", "interactive", "db"])

# Compatibility code for django.contrib.auth
# Is causing strange errors, removing for now (we might need to fix up orm first)
# (kept here, disabled, as a record of the intended hook)
#if 'django.contrib.auth' in settings.INSTALLED_APPS:
    #def create_permissions_compat(app, **kwargs):
        #from django.db.models import get_app
        #from django.contrib.auth.management import create_permissions
        #create_permissions(get_app(app), (), 0)
    #post_migrate.connect(create_permissions_compat)

6
south/test_shim.py Normal file
View File

@ -0,0 +1,6 @@
"""
This file is needed as 1.6 only finds tests in files labelled test_*,
and ignores tests/__init__.py.
"""
from south.tests import *

109
south/tests/__init__.py Normal file
View File

@ -0,0 +1,109 @@
from __future__ import print_function
#import unittest
import os
import sys
from functools import wraps
from django.conf import settings
from south.hacks import hacks
# Make sure skipping tests is available.
try:
# easiest and best is unittest included in Django>=1.3
from django.utils import unittest
except ImportError:
# earlier django... use unittest from stdlib
import unittest
# however, skipUnless was only added in Python 2.7;
# if not available, we need to do something else
try:
    skipUnless = unittest.skipUnless #@UnusedVariable
except AttributeError:
    def skipUnless(condition, message):
        """Fallback decorator: run the test only when *condition* holds."""
        def decorator(testfunc):
            @wraps(testfunc)
            def wrapper(self):
                if not condition:
                    # No SkipTest exception is available either; just report.
                    print("Skipping", testfunc.__name__, "--", message)
                else:
                    testfunc(self)
            return wrapper
        return decorator
# ditto for skipIf
try:
    skipIf = unittest.skipIf #@UnusedVariable
except AttributeError:
    def skipIf(condition, message):
        """Fallback decorator: skip the test when *condition* holds."""
        def decorator(testfunc):
            @wraps(testfunc)
            def wrapper(self):
                if not condition:
                    testfunc(self)
                else:
                    # No SkipTest exception is available either; just report.
                    print("Skipping", testfunc.__name__, "--", message)
            return wrapper
        return decorator
# Add the tests directory so fakeapp is on sys.path and importable as a
# top-level package from the individual test modules.
test_root = os.path.dirname(__file__)
sys.path.append(test_root)
# Note: the individual test files are imported below this.
class Monkeypatcher(unittest.TestCase):
    """
    Base test class for tests that play with the INSTALLED_APPS setting at runtime.

    Subclasses opt in by defining an `installed_apps` attribute; setUp then
    swaps the app cache to those apps and tearDown restores it.
    """

    def create_fake_app(self, name):
        """
        Return a minimal fake app module named *name*, with a `.migrations`
        attribute when that subpackage is importable.
        """
        class Fake:
            pass
        fake = Fake()
        fake.__name__ = name
        try:
            fake.migrations = __import__(name + ".migrations", {}, {}, ['migrations'])
        except ImportError:
            # The fake app simply has no migrations package; that's fine.
            pass
        return fake

    def setUp(self):
        """
        Changes the Django environment so we can run tests against our test apps.
        """
        if hasattr(self, 'installed_apps'):
            hacks.store_app_cache_state()
            hacks.set_installed_apps(self.installed_apps)
            # Make sure dependencies are calculated for new apps.
            # Bug fix: `Migrations` was referenced without any import in this
            # module, which raised NameError here; import it locally (a local
            # import also sidesteps any import-order issues at module load).
            from south.migration.base import Migrations
            Migrations._dependencies_done = False

    def tearDown(self):
        """
        Undoes what setUp did.
        """
        if hasattr(self, 'installed_apps'):
            hacks.reset_installed_apps()
            hacks.restore_app_cache_state()
# Try importing all tests if asked for (then we can run 'em)
try:
    skiptest = settings.SKIP_SOUTH_TESTS
except Exception:
    # Fix: a bare `except:` also trapped SystemExit/KeyboardInterrupt.
    # Exception still covers a missing setting (AttributeError) and
    # unconfigured-settings errors; anything like that means "skip".
    skiptest = True

if not skiptest:
    from south.tests.db import *
    from south.tests.db_mysql import *
    from south.tests.db_firebird import *
    from south.tests.logic import *
    from south.tests.autodetection import *
    from south.tests.logger import *
    from south.tests.inspector import *
    from south.tests.freezer import *

View File

@ -0,0 +1,360 @@
from south.tests import unittest
from south.creator.changes import AutoChanges, InitialChanges
from south.migration.base import Migrations
from south.tests import Monkeypatcher
from south.creator import freezer
from south.orm import FakeORM
from south.v2 import SchemaMigration
try:
from django.utils.six.moves import reload_module
except ImportError:
# Older django, no python3 support
reload_module = reload
class TestComparison(unittest.TestCase):
"""
Tests the comparison methods of startmigration.
"""
def test_no_change(self):
"Test with a completely unchanged definition."
self.assertEqual(
AutoChanges.different_attributes(
('django.db.models.fields.related.ForeignKey', [], {'to': "orm['southdemo.Lizard']"}),
('django.db.models.fields.related.ForeignKey', [], {'to': "orm['southdemo.Lizard']"}),
),
False,
)
self.assertEqual(
AutoChanges.different_attributes(
('django.db.models.fields.related.ForeignKey', ['ohhai', 'there'], {'to': "somewhere", "from": "there"}),
('django.db.models.fields.related.ForeignKey', ['ohhai', 'there'], {"from": "there", 'to': "somewhere"}),
),
False,
)
def test_pos_change(self):
"Test with a changed positional argument."
self.assertEqual(
AutoChanges.different_attributes(
('django.db.models.fields.CharField', ['hi'], {'to': "foo"}),
('django.db.models.fields.CharField', [], {'to': "foo"}),
),
True,
)
self.assertEqual(
AutoChanges.different_attributes(
('django.db.models.fields.CharField', [], {'to': "foo"}),
('django.db.models.fields.CharField', ['bye'], {'to': "foo"}),
),
True,
)
self.assertEqual(
AutoChanges.different_attributes(
('django.db.models.fields.CharField', ['pi'], {'to': "foo"}),
('django.db.models.fields.CharField', ['pi'], {'to': "foo"}),
),
False,
)
self.assertEqual(
AutoChanges.different_attributes(
('django.db.models.fields.CharField', ['pisdadad'], {'to': "foo"}),
('django.db.models.fields.CharField', ['pi'], {'to': "foo"}),
),
True,
)
self.assertEqual(
AutoChanges.different_attributes(
('django.db.models.fields.CharField', ['hi'], {}),
('django.db.models.fields.CharField', [], {}),
),
True,
)
self.assertEqual(
AutoChanges.different_attributes(
('django.db.models.fields.CharField', [], {}),
('django.db.models.fields.CharField', ['bye'], {}),
),
True,
)
self.assertEqual(
AutoChanges.different_attributes(
('django.db.models.fields.CharField', ['pi'], {}),
('django.db.models.fields.CharField', ['pi'], {}),
),
False,
)
self.assertEqual(
AutoChanges.different_attributes(
('django.db.models.fields.CharField', ['pi'], {}),
('django.db.models.fields.CharField', ['45fdfdf'], {}),
),
True,
)
def test_kwd_change(self):
"Test a changed keyword argument"
self.assertEqual(
AutoChanges.different_attributes(
('django.db.models.fields.CharField', ['pi'], {'to': "foo"}),
('django.db.models.fields.CharField', ['pi'], {'to': "blue"}),
),
True,
)
self.assertEqual(
AutoChanges.different_attributes(
('django.db.models.fields.CharField', [], {'to': "foo"}),
('django.db.models.fields.CharField', [], {'to': "blue"}),
),
True,
)
self.assertEqual(
AutoChanges.different_attributes(
('django.db.models.fields.CharField', ['b'], {'to': "foo"}),
('django.db.models.fields.CharField', ['b'], {'to': "blue"}),
),
True,
)
self.assertEqual(
AutoChanges.different_attributes(
('django.db.models.fields.CharField', [], {'to': "foo"}),
('django.db.models.fields.CharField', [], {}),
),
True,
)
self.assertEqual(
AutoChanges.different_attributes(
('django.db.models.fields.CharField', ['a'], {'to': "foo"}),
('django.db.models.fields.CharField', ['a'], {}),
),
True,
)
self.assertEqual(
AutoChanges.different_attributes(
('django.db.models.fields.CharField', [], {}),
('django.db.models.fields.CharField', [], {'to': "foo"}),
),
True,
)
self.assertEqual(
AutoChanges.different_attributes(
('django.db.models.fields.CharField', ['a'], {}),
('django.db.models.fields.CharField', ['a'], {'to': "foo"}),
),
True,
)
def test_backcompat_nochange(self):
"Test that the backwards-compatable comparison is working"
self.assertEqual(
AutoChanges.different_attributes(
('models.CharField', [], {}),
('django.db.models.fields.CharField', [], {}),
),
False,
)
self.assertEqual(
AutoChanges.different_attributes(
('models.CharField', ['ack'], {}),
('django.db.models.fields.CharField', ['ack'], {}),
),
False,
)
self.assertEqual(
AutoChanges.different_attributes(
('models.CharField', [], {'to':'b'}),
('django.db.models.fields.CharField', [], {'to':'b'}),
),
False,
)
self.assertEqual(
AutoChanges.different_attributes(
('models.CharField', ['hah'], {'to':'you'}),
('django.db.models.fields.CharField', ['hah'], {'to':'you'}),
),
False,
)
self.assertEqual(
AutoChanges.different_attributes(
('models.CharField', ['hah'], {'to':'you'}),
('django.db.models.fields.CharField', ['hah'], {'to':'heh'}),
),
True,
)
self.assertEqual(
AutoChanges.different_attributes(
('models.CharField', ['hah'], {}),
('django.db.models.fields.CharField', [], {'to':"orm['appname.hah']"}),
),
False,
)
self.assertEqual(
AutoChanges.different_attributes(
('models.CharField', ['hah'], {}),
('django.db.models.fields.CharField', [], {'to':'hah'}),
),
True,
)
self.assertEqual(
AutoChanges.different_attributes(
('models.CharField', ['hah'], {}),
('django.db.models.fields.CharField', [], {'to':'rrr'}),
),
True,
)
self.assertEqual(
AutoChanges.different_attributes(
('models.CharField', ['hah'], {}),
('django.db.models.fields.IntField', [], {'to':'hah'}),
),
True,
)
class TestNonManagedIgnored(Monkeypatcher):
    """
    Checks that models with Meta.managed = False are ignored by the change
    detectors: no create, delete or alter migrations may be generated for
    them, either initially or automatically.
    """

    # Apps that Monkeypatcher installs for the duration of each test.
    installed_apps = ["non_managed"]

    # Frozen definition of the non-managed model, as the autodetector would
    # see it (note 'managed': 'False' in Meta).
    full_defs = {
        'non_managed.legacy': {
            'Meta': {'object_name': 'Legacy', 'db_table': "'legacy_table'", 'managed': 'False'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True'}),
            'size': ('django.db.models.fields.IntegerField', [], {})
        }
    }

    def test_not_added_init(self):
        # An initial migration must not create tables for non-managed models.
        migrations = Migrations("non_managed")
        changes = InitialChanges(migrations)
        change_list = changes.get_changes()
        if list(change_list):
            self.fail("Initial migration creates table for non-managed model")

    def test_not_added_auto(self):
        # Auto-detection from an empty previous state must not add the
        # non-managed model's table.
        empty_defs = { }
        class EmptyMigration(SchemaMigration):
            "Serves as fake previous migration"
            def forwards(self, orm):
                pass
            def backwards(self, orm):
                pass
            models = empty_defs
            complete_apps = ['non_managed']
        migrations = Migrations("non_managed")
        empty_orm = FakeORM(EmptyMigration, "non_managed")
        changes = AutoChanges(
            migrations = migrations,
            old_defs = empty_defs,
            old_orm = empty_orm,
            new_defs = self.full_defs,
        )
        change_list = changes.get_changes()
        if list(change_list):
            self.fail("Auto migration creates table for non-managed model")

    def test_not_deleted_auto(self):
        # Auto-detection towards an empty state must not drop the
        # non-managed model's table.
        empty_defs = { }
        # NOTE(review): this local is never read below — the old state used
        # by AutoChanges is self.full_defs, not this freeze.
        old_defs = freezer.freeze_apps(["non_managed"])
        class InitialMigration(SchemaMigration):
            "Serves as fake previous migration"
            def forwards(self, orm):
                pass
            def backwards(self, orm):
                pass
            models = self.full_defs
            complete_apps = ['non_managed']
        migrations = Migrations("non_managed")
        initial_orm = FakeORM(InitialMigration, "non_managed")
        changes = AutoChanges(
            migrations = migrations,
            old_defs = self.full_defs,
            old_orm = initial_orm,
            new_defs = empty_defs,
        )
        change_list = changes.get_changes()
        if list(change_list):
            self.fail("Auto migration deletes table for non-managed model")

    def test_not_modified_auto(self):
        # A field added to a non-managed model must not produce an
        # add-column migration.
        fake_defs = {
            'non_managed.legacy': {
                'Meta': {'object_name': 'Legacy', 'db_table': "'legacy_table'", 'managed': 'False'},
                'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
                'name': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True'}),
                #'size': ('django.db.models.fields.IntegerField', [], {}) # The "change" is the addition of this field
            }
        }
        class InitialMigration(SchemaMigration):
            "Serves as fake previous migration"
            def forwards(self, orm):
                pass
            def backwards(self, orm):
                pass
            models = fake_defs
            complete_apps = ['non_managed']
        from non_managed import models as dummy_import_to_force_loading_models # TODO: Does needing this indicate a bug in MokeyPatcher?
        reload_module(dummy_import_to_force_loading_models) # really force...
        migrations = Migrations("non_managed")
        initial_orm = FakeORM(InitialMigration, "non_managed")
        changes = AutoChanges(
            migrations = migrations,
            old_defs = fake_defs,
            old_orm = initial_orm,
            new_defs = self.full_defs
        )
        change_list = changes.get_changes()
        if list(change_list):
            self.fail("Auto migration changes table for non-managed model")

View File

View File

@ -0,0 +1,13 @@
from south.db import db
from django.db import models
class Migration:
    # Fixture: depends on a migration in an app named 'unknown' that does
    # not exist — presumably exercises missing-app dependency errors in the
    # resolver (TODO confirm against the dependency tests).
    depends_on = [('unknown', '0001_initial')]
    def forwards(self):
        # Intentionally empty: this fixture exists only for its metadata.
        pass
    def backwards(self):
        pass

View File

@ -0,0 +1,13 @@
from south.db import db
from django.db import models
class Migration:
    # Fixture: depends on migration '9999_unknown' in fakeapp, a number
    # that does not exist — presumably exercises unknown-migration
    # dependency errors (TODO confirm against the dependency tests).
    depends_on = [('fakeapp', '9999_unknown')]
    def forwards(self):
        # Intentionally empty: this fixture exists only for its metadata.
        pass
    def backwards(self):
        pass

View File

@ -0,0 +1,13 @@
from south.db import db
from django.db import models
class Migration:
    # Fixture: depends on a later migration ('0004_higher') within its own
    # app — presumably exercises forward/out-of-order dependency handling
    # (TODO confirm against the dependency tests).
    depends_on = [('brokenapp', '0004_higher')]
    def forwards(self):
        # Intentionally empty: this fixture exists only for its metadata.
        pass
    def backwards(self):
        pass

View File

@ -0,0 +1,11 @@
from south.db import db
from django.db import models
class Migration:
    # No-op fixture migration: gives the test suite a discoverable,
    # numbered migration whose application and rollback do nothing.
    def forwards(self):
        pass
    def backwards(self):
        pass

View File

@ -0,0 +1,55 @@
# -*- coding: UTF-8 -*-
from django.db import models
from django.contrib.auth.models import User as UserAlias
def default_func():
    # Callable default used by HorribleModel.func below; freezing a
    # callable default is one of the parser edge cases under test.
    return "yays"
# An empty case: a model with no fields at all (the parser must cope).
class Other1(models.Model): pass
# Nastiness.
class HorribleModel(models.Model):
    "A model to test the edge cases of model parsing"
    # NOTE: the unusual constructs and formatting throughout this class are
    # deliberate — they stress South's model-definition parser. Do not
    # "clean them up"; tests depend on these exact shapes.

    # Non-field class attributes used as field defaults below.
    ZERO, ONE = range(2)

    # First, some nice fields
    name = models.CharField(max_length=255)
    short_name = models.CharField(max_length=50)
    slug = models.SlugField(unique=True)

    # A ForeignKey, to a model above, and then below
    o1 = models.ForeignKey(Other1)
    o2 = models.ForeignKey('Other2')

    # Now to something outside
    user = models.ForeignKey(UserAlias, related_name="horribles")

    # Unicode!
    code = models.CharField(max_length=25, default="↑↑↓↓←→←→BA")

    # Odd defaults!
    class_attr = models.IntegerField(default=ZERO)
    func = models.CharField(max_length=25, default=default_func)

    # Time to get nasty. Define a non-field choices, and use it
    choices = [('hello', '1'), ('world', '2')]
    choiced = models.CharField(max_length=20, choices=choices)

    class Meta:
        db_table = "my_fave"
        # String built from concatenation + a multi-line literal — another
        # parsing edge case.
        verbose_name = "Dr. Strangelove," + \
            """or how I learned to stop worrying
and love the bomb"""

    # Now spread over multiple lines
    multiline = \
        models.TextField(
        )
# Special case.
class Other2(models.Model):
    # Try loading a field without a newline after it (inspect hates this)
    close_but_no_cigar = models.PositiveIntegerField(primary_key=True)

View File

View File

@ -0,0 +1,13 @@
from south.db import db
from django.db import models
class Migration:
    # One half of a circular-dependency fixture: circular_a's first
    # migration depends on circular_b's, and vice versa, so the resolver's
    # cycle detection can be exercised.
    depends_on = [('circular_b', '0001_first')]
    def forwards(self):
        # Intentionally empty: this fixture exists only for its metadata.
        pass
    def backwards(self):
        pass

View File

View File

View File

@ -0,0 +1,13 @@
from south.db import db
from django.db import models
class Migration:
    # The other half of the circular-dependency fixture: depends back on
    # circular_a's first migration, completing the cycle.
    depends_on = [('circular_a', '0001_first')]
    def forwards(self):
        # Intentionally empty: this fixture exists only for its metadata.
        pass
    def backwards(self):
        pass

View File

1060
south/tests/db.py Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,39 @@
from django.db import models
from south.db import db
from south.tests import unittest, skipUnless
class FirebirdTests(unittest.TestCase):

    """
    Tests firebird related issues
    """

    def setUp(self):
        # Banner output: these tests run against a live Firebird database,
        # so delimit their (potentially noisy) output clearly.
        print('=' * 80)
        print('Begin Firebird test')

    def tearDown(self):
        print('End Firebird test')
        print('=' * 80)

    @skipUnless(db.backend_name == "firebird", "Firebird-only test")
    def test_firebird_double_index_creation_1317(self):
        """
        Tests foreign key creation, especially uppercase (see #61)
        """
        # The primary key name is deliberately uppercase ('ID'), as is the
        # FK column name 'UNIQUE' (also a SQL keyword), to exercise
        # identifier case/quoting handling in the Firebird backend.
        Test = db.mock_model(model_name='Test',
                db_table='test5a',
                db_tablespace='',
                pk_field_name='ID',
                pk_field_type=models.AutoField,
                pk_field_args=[]
            )
        db.create_table("test5a", [('ID', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True))])
        db.create_table("test5b", [
                ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
                ('UNIQUE', models.ForeignKey(Test)),
            ])
        # FK constraints are emitted as deferred SQL; run them now so any
        # double-index/creation problem surfaces inside the test.
        db.execute_deferred_sql()

164
south/tests/db_mysql.py Normal file
View File

@ -0,0 +1,164 @@
# Additional MySQL-specific tests
# Written by: F. Gabriel Gosselin <gabrielNOSPAM@evidens.ca>
# Based on tests by: aarranz
from south.tests import unittest, skipUnless
from south.db import db, generic, mysql
from django.db import connection, models
from south.utils.py3 import with_metaclass
# A class decoration may be used in lieu of this when Python 2.5 is the
# minimum.
class TestMySQLOperationsMeta(type):
    """Metaclass that wraps every test_* method of its classes in a
    skipUnless decorator so they only run on the MySQL backend."""

    def __new__(mcs, name, bases, dict_):
        skip_unless_mysql = skipUnless(db.backend_name == "mysql", 'MySQL-specific tests')
        # Rebuild the namespace, decorating only the test methods.
        decorated = {
            attr: (skip_unless_mysql(value) if attr.startswith('test') else value)
            for attr, value in dict_.items()
        }
        return type.__new__(mcs, name, bases, decorated)
class TestMySQLOperations(with_metaclass(TestMySQLOperationsMeta, unittest.TestCase)):
    """MySQL-specific tests.

    Exercises the MySQL backend's foreign-key introspection helpers
    (_find_foreign_constraints, _lookup_constraint_references,
    _lookup_reverse_constraint) and checks that FK constraints survive
    column deletes/renames and table renames.  The metaclass wraps every
    test method in skipUnless(db.backend_name == "mysql"), so the class is
    a no-op on other backends.

    Fix: uses assertEqual instead of the deprecated assertEquals alias
    (removed in Python 3.12).
    """

    def setUp(self):
        # Keep output quiet and make sure no deferred SQL from a previous
        # test leaks into this one.
        db.debug = False
        db.clear_deferred_sql()

    def tearDown(self):
        pass

    def _create_foreign_tables(self, main_name, reference_name):
        """Create ``reference_name`` and ``main_name``, the latter holding a
        FK field 'foreign' (column 'foreign_id') pointing at the former;
        return the mocked referenced model."""
        # Create foreign table and model
        Foreign = db.mock_model(model_name='Foreign', db_table=reference_name,
                                db_tablespace='', pk_field_name='id',
                                pk_field_type=models.AutoField,
                                pk_field_args=[])
        db.create_table(reference_name, [
                ('id', models.AutoField(primary_key=True)),
            ])
        # Create table with foreign key
        db.create_table(main_name, [
                ('id', models.AutoField(primary_key=True)),
                ('foreign', models.ForeignKey(Foreign)),
            ])
        return Foreign

    def test_constraint_references(self):
        """Tests that referred table is reported accurately"""
        main_table = 'test_cns_ref'
        reference_table = 'test_cr_foreign'
        db.start_transaction()
        self._create_foreign_tables(main_table, reference_table)
        # FK constraints are emitted as deferred SQL; flush them first.
        db.execute_deferred_sql()
        constraint = db._find_foreign_constraints(main_table, 'foreign_id')[0]
        references = db._lookup_constraint_references(main_table, constraint)
        self.assertEqual((reference_table, 'id'), references)
        db.delete_table(main_table)
        db.delete_table(reference_table)

    def test_reverse_column_constraint(self):
        """Tests that referred column in a foreign key (ex. id) is found"""
        main_table = 'test_reverse_ref'
        reference_table = 'test_rr_foreign'
        db.start_transaction()
        self._create_foreign_tables(main_table, reference_table)
        db.execute_deferred_sql()
        inverse = db._lookup_reverse_constraint(reference_table, 'id')
        (cname, rev_table, rev_column) = inverse[0]
        self.assertEqual(main_table, rev_table)
        self.assertEqual('foreign_id', rev_column)
        db.delete_table(main_table)
        db.delete_table(reference_table)

    def test_delete_fk_column(self):
        # Dropping an FK column must also drop its constraint.
        main_table = 'test_drop_foreign'
        ref_table = 'test_df_ref'
        self._create_foreign_tables(main_table, ref_table)
        db.execute_deferred_sql()
        constraints = db._find_foreign_constraints(main_table, 'foreign_id')
        self.assertEqual(len(constraints), 1)
        db.delete_column(main_table, 'foreign_id')
        constraints = db._find_foreign_constraints(main_table, 'foreign_id')
        self.assertEqual(len(constraints), 0)
        db.delete_table(main_table)
        db.delete_table(ref_table)

    def test_rename_fk_column(self):
        # Renaming an FK column must re-create the constraint on the new name.
        main_table = 'test_rename_foreign'
        ref_table = 'test_rf_ref'
        self._create_foreign_tables(main_table, ref_table)
        db.execute_deferred_sql()
        constraints = db._find_foreign_constraints(main_table, 'foreign_id')
        self.assertEqual(len(constraints), 1)
        db.rename_column(main_table, 'foreign_id', 'reference_id')
        db.execute_deferred_sql()  # Create constraints
        constraints = db._find_foreign_constraints(main_table, 'reference_id')
        self.assertEqual(len(constraints), 1)
        db.delete_table(main_table)
        db.delete_table(ref_table)

    def test_rename_fk_inbound(self):
        """
        Tests that the column referred to by an external column can be renamed.
        Edge case, but also useful as stepping stone to renaming tables.
        """
        main_table = 'test_rename_fk_inbound'
        ref_table = 'test_rfi_ref'
        self._create_foreign_tables(main_table, ref_table)
        db.execute_deferred_sql()
        constraints = db._lookup_reverse_constraint(ref_table, 'id')
        self.assertEqual(len(constraints), 1)
        db.rename_column(ref_table, 'id', 'rfi_id')
        db.execute_deferred_sql()  # Create constraints
        constraints = db._lookup_reverse_constraint(ref_table, 'rfi_id')
        self.assertEqual(len(constraints), 1)
        # The outbound side must now point at the renamed column.
        cname = db._find_foreign_constraints(main_table, 'foreign_id')[0]
        (rtable, rcolumn) = db._lookup_constraint_references(main_table, cname)
        self.assertEqual(rcolumn, 'rfi_id')
        db.delete_table(main_table)
        db.delete_table(ref_table)

    def test_rename_constrained_table(self):
        """Renames a table with a foreign key column (towards another table)"""
        main_table = 'test_rn_table'
        ref_table = 'test_rt_ref'
        renamed_table = 'test_renamed_table'
        self._create_foreign_tables(main_table, ref_table)
        db.execute_deferred_sql()
        constraints = db._find_foreign_constraints(main_table, 'foreign_id')
        self.assertEqual(len(constraints), 1)
        db.rename_table(main_table, renamed_table)
        db.execute_deferred_sql()  # Create constraints
        constraints = db._find_foreign_constraints(renamed_table, 'foreign_id')
        self.assertEqual(len(constraints), 1)
        (rtable, rcolumn) = db._lookup_constraint_references(
            renamed_table, constraints[0])
        self.assertEqual(rcolumn, 'id')
        db.delete_table(renamed_table)
        db.delete_table(ref_table)

    def test_renamed_referenced_table(self):
        """Rename a table referred to in a foreign key"""
        main_table = 'test_rn_refd_table'
        ref_table = 'test_rrt_ref'
        renamed_table = 'test_renamed_ref'
        self._create_foreign_tables(main_table, ref_table)
        db.execute_deferred_sql()
        constraints = db._lookup_reverse_constraint(ref_table)
        self.assertEqual(len(constraints), 1)
        db.rename_table(ref_table, renamed_table)
        db.execute_deferred_sql()  # Create constraints
        constraints = db._find_foreign_constraints(main_table, 'foreign_id')
        self.assertEqual(len(constraints), 1)
        (rtable, rcolumn) = db._lookup_constraint_references(
            main_table, constraints[0])
        self.assertEqual(renamed_table, rtable)
        db.delete_table(main_table)
        db.delete_table(renamed_table)

View File

View File

@ -0,0 +1,11 @@
from south.db import db
from django.db import models
class Migration:
    # No-op fixture migration: gives the test suite a discoverable,
    # numbered migration whose application and rollback do nothing.
    def forwards(self):
        pass
    def backwards(self):
        pass

View File

@ -0,0 +1,11 @@
from south.db import db
from django.db import models
class Migration:
    # No-op fixture migration: gives the test suite a discoverable,
    # numbered migration whose application and rollback do nothing.
    def forwards(self):
        pass
    def backwards(self):
        pass

View File

@ -0,0 +1,11 @@
from south.db import db
from django.db import models
class Migration:
    # No-op fixture migration: gives the test suite a discoverable,
    # numbered migration whose application and rollback do nothing.
    def forwards(self):
        pass
    def backwards(self):
        pass

View File

@ -0,0 +1,13 @@
from south.db import db
from django.db import models
class Migration:
    # Cross-app dependency fixture: must run after deps_b's '0003_b',
    # exercising inter-app ordering in the dependency resolver.
    depends_on = [('deps_b', '0003_b')]
    def forwards(self):
        # Intentionally empty: this fixture exists only for its metadata.
        pass
    def backwards(self):
        pass

View File

@ -0,0 +1,11 @@
from south.db import db
from django.db import models
class Migration:
    # No-op fixture migration: gives the test suite a discoverable,
    # numbered migration whose application and rollback do nothing.
    def forwards(self):
        pass
    def backwards(self):
        pass

View File

View File

View File

@ -0,0 +1,11 @@
from south.db import db
from django.db import models
class Migration:
    # No-op fixture migration: gives the test suite a discoverable,
    # numbered migration whose application and rollback do nothing.
    def forwards(self):
        pass
    def backwards(self):
        pass

View File

@ -0,0 +1,13 @@
from south.db import db
from django.db import models
class Migration:
    # Cross-app dependency fixture: must run after deps_a's '0002_a',
    # exercising inter-app ordering in the dependency resolver.
    depends_on = [('deps_a', '0002_a')]
    def forwards(self):
        # Intentionally empty: this fixture exists only for its metadata.
        pass
    def backwards(self):
        pass

View File

@ -0,0 +1,13 @@
from south.db import db
from django.db import models
class Migration:
    # Cross-app dependency fixture: must run after deps_a's '0003_a',
    # exercising inter-app ordering in the dependency resolver.
    depends_on = [('deps_a', '0003_a')]
    def forwards(self):
        # Intentionally empty: this fixture exists only for its metadata.
        pass
    def backwards(self):
        pass

View File

@ -0,0 +1,11 @@
from south.db import db
from django.db import models
class Migration:
    # No-op fixture migration: gives the test suite a discoverable,
    # numbered migration whose application and rollback do nothing.
    def forwards(self):
        pass
    def backwards(self):
        pass

View File

@ -0,0 +1,11 @@
from south.db import db
from django.db import models
class Migration:
    # No-op fixture migration: gives the test suite a discoverable,
    # numbered migration whose application and rollback do nothing.
    def forwards(self):
        pass
    def backwards(self):
        pass

View File

View File

View File

@ -0,0 +1,11 @@
from south.db import db
from django.db import models
class Migration:
    # No-op fixture migration: gives the test suite a discoverable,
    # numbered migration whose application and rollback do nothing.
    def forwards(self):
        pass
    def backwards(self):
        pass

View File

@ -0,0 +1,11 @@
from south.db import db
from django.db import models
class Migration:
    # No-op fixture migration: gives the test suite a discoverable,
    # numbered migration whose application and rollback do nothing.
    def forwards(self):
        pass
    def backwards(self):
        pass

View File

@ -0,0 +1,11 @@
from south.db import db
from django.db import models
class Migration:
    # No-op fixture migration: gives the test suite a discoverable,
    # numbered migration whose application and rollback do nothing.
    def forwards(self):
        pass
    def backwards(self):
        pass

View File

@ -0,0 +1,11 @@
from south.db import db
from django.db import models
class Migration:
    # No-op fixture migration: gives the test suite a discoverable,
    # numbered migration whose application and rollback do nothing.
    def forwards(self):
        pass
    def backwards(self):
        pass

View File

@ -0,0 +1,13 @@
from south.db import db
from django.db import models
class Migration:
    # Cross-app dependency fixture: must run after deps_a's '0002_a',
    # exercising inter-app ordering in the dependency resolver.
    depends_on = [('deps_a', '0002_a')]
    def forwards(self):
        # Intentionally empty: this fixture exists only for its metadata.
        pass
    def backwards(self):
        pass

View File

View File

View File

View File

View File

@ -0,0 +1,17 @@
from south.db import db
from django.db import models
class Migration:
    # Fixture migration: creates the table for the test model 'Spam' going
    # forward and drops it again going backward.
    def forwards(self):
        # Model 'Spam'
        db.create_table("southtest_spam", (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('weight', models.FloatField()),
            ('expires', models.DateTimeField()),
            ('name', models.CharField(max_length=255))
        ))
    def backwards(self):
        db.delete_table("southtest_spam")

Some files were not shown because too many files have changed in this diff Show More