from threading import local
import warnings
from django . conf import settings
from django . core import signals
from django . core . cache . backends . base import ( InvalidCacheBackendError , CacheKeyWarning , BaseCache )
from django . core . exceptions import ImproperlyConfigured
from django . utils . deprecation import RemovedInDjango19Warning
from django . utils . module_loading import import_string
__all__ = [ 'get_cache' , 'cache' , 'DEFAULT_CACHE_ALIAS' , 'InvalidCacheBackendError' , 'CacheKeyWarning' , 'BaseCache' , ]
DEFAULT_CACHE_ALIAS = 'default'
if DEFAULT_CACHE_ALIAS not in settings . CACHES :
raise ImproperlyConfigured ( "You must define a '%s' cache" % DEFAULT_CACHE_ALIAS )
def get_cache ( backend , ** kwargs ) :
warnings . warn ( "'get_cache' is deprecated in favor of 'caches'." , RemovedInDjango19Warning , stacklevel = 2 )
cache = _create_cache ( backend , ** kwargs )
signals . request_finished . connect ( cache . close )
return cache
def _create_cache ( backend , ** kwargs ) :
try :
try :
conf = settings . CACHES [ backend ]
except KeyError :
try :
import_string ( backend )
except ImportError as e :
raise InvalidCacheBackendError ( "Could not find backend '%s': %s" % ( backend , e ) )
location = kwargs . pop ( 'LOCATION' , '' )
params = kwargs
else :
params = conf . copy ( )
params . update ( kwargs )
backend = params . pop ( 'BACKEND' )
location = params . pop ( 'LOCATION' , '' )
backend_cls = import_string ( backend )
except ImportError as e :
raise InvalidCacheBackendError ( "Could not find backend '%s': %s" % ( backend , e ) )
return backend_cls ( location , params )
class CacheHandler ( object ) :
def __init__ ( self ) :
self . _caches = local ( )
def __getitem__ ( self , alias ) :
try :
return self . _caches . caches [ alias ]
except AttributeError :
self . _caches . caches = { }
except KeyError :
pass
if alias not in settings . CACHES :
raise InvalidCacheBackendError ( "Could not find config for '%s' in settings.CACHES" % alias )
cache = _create_cache ( alias )
self . _caches . caches [ alias ] = cache
return cache
def all ( self ) :
return getattr ( self . _caches , 'caches' , { } ) . values ( )
caches = CacheHandler ( )
class DefaultCacheProxy ( object ) :
def __getattr__ ( self , name ) :
return getattr ( caches [ DEFAULT_CACHE_ALIAS ] , name )
def __setattr__ ( self , name , value ) :
return setattr ( caches [ DEFAULT_CACHE_ALIAS ] , name , value )
def __delattr__ ( self , name ) :
return delattr ( caches [ DEFAULT_CACHE_ALIAS ] , name )
def __contains__ ( self , key ) :
return key in caches [ DEFAULT_CACHE_ALIAS ]
def __eq__ ( self , other ) :
return caches [ DEFAULT_CACHE_ALIAS ] == other
def __ne__ ( self , other ) :
return caches [ DEFAULT_CACHE_ALIAS ] != other
cache = DefaultCacheProxy ( )
def close_caches ( ** kwargs ) :
for cache in caches . all ( ) :
cache . close ( )
signals . request_finished . connect ( close_caches )
from __future__ import unicode_literals
import time
import warnings
from django . core . exceptions import ImproperlyConfigured , DjangoRuntimeWarning
from django . utils . module_loading import import_string
class InvalidCacheBackendError ( ImproperlyConfigured ) :
pass
class CacheKeyWarning ( DjangoRuntimeWarning ) :
pass
DEFAULT_TIMEOUT = object ( )
MEMCACHE_MAX_KEY_LENGTH = 250
def default_key_func ( key , key_prefix , version ) :
return '%s:%s:%s' % ( key_prefix , version , key )
def get_key_func ( key_func ) :
if key_func is not None :
if callable ( key_func ) :
return key_func
else :
return import_string ( key_func )
return default_key_func
class BaseCache ( object ) :
def __init__ ( self , params ) :
timeout = params . get ( 'timeout' , params . get ( 'TIMEOUT' , 300 ) )
if timeout is not None :
try :
timeout = int ( timeout )
except ( ValueError , TypeError ) :
timeout = 300
self . default_timeout = timeout
options = params . get ( 'OPTIONS' , { } )
max_entries = params . get ( 'max_entries' , options . get ( 'MAX_ENTRIES' , 300 ) )
try :
self . _max_entries = int ( max_entries )
except ( ValueError , TypeError ) :
self . _max_entries = 300
cull_frequency = params . get ( 'cull_frequency' , options . get ( 'CULL_FREQUENCY' , 3 ) )
try :
self . _cull_frequency = int ( cull_frequency )
except ( ValueError , TypeError ) :
self . _cull_frequency = 3
self . key_prefix = params . get ( 'KEY_PREFIX' , '' )
self . version = params . get ( 'VERSION' , 1 )
self . key_func = get_key_func ( params . get ( 'KEY_FUNCTION' , None ) )
def get_backend_timeout ( self , timeout = DEFAULT_TIMEOUT ) :
if timeout == DEFAULT_TIMEOUT :
timeout = self . default_timeout
elif timeout == 0 :
timeout = - 1
return None if timeout is None else time . time ( ) + timeout
def make_key ( self , key , version = None ) :
if version is None :
version = self . version
new_key = self . key_func ( key , self . key_prefix , version )
return new_key
def add ( self , key , value , timeout = DEFAULT_TIMEOUT , version = None ) :
""" Set a value in the cache if the key does not already exist. If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. Returns True if the value was stored, False otherwise. """
raise NotImplementedError ( 'subclasses of BaseCache must provide an add() method' )
def get ( self , key , default = None , version = None ) :
raise NotImplementedError ( 'subclasses of BaseCache must provide a get() method' )
def set ( self , key , value , timeout = DEFAULT_TIMEOUT , version = None ) :
raise NotImplementedError ( 'subclasses of BaseCache must provide a set() method' )
def delete ( self , key , version = None ) :
raise NotImplementedError ( 'subclasses of BaseCache must provide a delete() method' )
def get_many ( self , keys , version = None ) :
d = { }
for k in keys :
val = self . get ( k , version = version )
if val is not None :
d [ k ] = val
return d
def has_key ( self , key , version = None ) :
return self . get ( key , version = version ) is not None
def incr ( self , key , delta = 1 , version = None ) :
value = self . get ( key , version = version )
if value is None :
raise ValueError ( "Key '%s' not found" % key )
new_value = value + delta
self . set ( key , new_value , version = version )
return new_value
def decr ( self , key , delta = 1 , version = None ) :
return self . incr ( key , - delta , version = version )
def __contains__ ( self , key ) :
return self . has_key ( key )
def set_many ( self , data , timeout = DEFAULT_TIMEOUT , version = None ) :
for key , value in data . items ( ) :
self . set ( key , value , timeout = timeout , version = version )
def delete_many ( self , keys , version = None ) :
for key in keys :
self . delete ( key , version = version )
def clear ( self ) :
raise NotImplementedError ( 'subclasses of BaseCache must provide a clear() method' )
def validate_key ( self , key ) :
if len ( key ) > MEMCACHE_MAX_KEY_LENGTH :
warnings . warn ( 'Cache key will cause errors if used with memcached: ' '%s (longer than %s)' % ( key , MEMCACHE_MAX_KEY_LENGTH ) , CacheKeyWarning )
for char in key :
if ord ( char ) < 33 or ord ( char ) == 127 :
warnings . warn ( 'Cache key contains characters that will cause ' 'errors if used with memcached: %r' % key , CacheKeyWarning )
def incr_version ( self , key , delta = 1 , version = None ) :
if version is None :
version = self . version
value = self . get ( key , version = version )
if value is None :
raise ValueError ( "Key '%s' not found" % key )
self . set ( key , value , version = version + delta )
self . delete ( key , version = version )
return version + delta
def decr_version ( self , key , delta = 1 , version = None ) :
return self . incr_version ( key , - delta , version )
def close ( self , ** kwargs ) :
"""Close the cache connection"""
pass
import base64
from datetime import datetime
try :
from django . utils . six . moves import cPickle as pickle
except ImportError :
import pickle
from django . conf import settings
from django . core . cache . backends . base import BaseCache , DEFAULT_TIMEOUT
from django . db import connections , transaction , router , DatabaseError
from django . db . backends . utils import typecast_timestamp
from django . utils import timezone , six
from django . utils . encoding import force_bytes
class Options ( object ) :
def __init__ ( self , table ) :
self . db_table = table
self . app_label = 'django_cache'
self . model_name = 'cacheentry'
self . verbose_name = 'cache entry'
self . verbose_name_plural = 'cache entries'
self . object_name = 'CacheEntry'
self . abstract = False
self . managed = True
self . proxy = False
class BaseDatabaseCache ( BaseCache ) :
def __init__ ( self , table , params ) :
BaseCache . __init__ ( self , params )
self . _table = table
class CacheEntry ( object ) :
_meta = Options ( table )
self . cache_model_class = CacheEntry
class DatabaseCache ( BaseDatabaseCache ) :
def get ( self , key , default = None , version = None ) :
key = self . make_key ( key , version = version )
self . validate_key ( key )
db = router . db_for_read ( self . cache_model_class )
table = connections [ db ] . ops . quote_name ( self . _table )
with connections [ db ] . cursor ( ) as cursor :
cursor . execute ( "SELECT cache_key, value, expires FROM %s " "WHERE cache_key = %%s" % table , [ key ] )
row = cursor . fetchone ( )
if row is None :
return default
now = timezone . now ( )
expires = row [ 2 ]
if connections [ db ] . features . needs_datetime_string_cast and not isinstance ( expires , datetime ) :
expires = typecast_timestamp ( str ( expires ) )
if expires < now :
db = router . db_for_write ( self . cache_model_class )
with connections [ db ] . cursor ( ) as cursor :
cursor . execute ( "DELETE FROM %s " "WHERE cache_key = %%s" % table , [ key ] )
return default
value = connections [ db ] . ops . process_clob ( row [ 1 ] )
return pickle . loads ( base64 . b64decode ( force_bytes ( value ) ) )
def set ( self , key , value , timeout = DEFAULT_TIMEOUT , version = None ) :
key = self . make_key ( key , version = version )
self . validate_key ( key )
self . _base_set ( 'set' , key , value , timeout )
def add ( self , key , value , timeout = DEFAULT_TIMEOUT , version = None ) :
key = self . make_key ( key , version = version )
self . validate_key ( key )
return self . _base_set ( 'add' , key , value , timeout )
def _base_set ( self , mode , key , value , timeout = DEFAULT_TIMEOUT ) :
timeout = self . get_backend_timeout ( timeout )
db = router . db_for_write ( self . cache_model_class )
table = connections [ db ] . ops . quote_name ( self . _table )
with connections [ db ] . cursor ( ) as cursor :
cursor . execute ( "SELECT COUNT(*) FROM %s" % table )
num = cursor . fetchone ( ) [ 0 ]
now = timezone . now ( )
now = now . replace ( microsecond = 0 )
if timeout is None :
exp = datetime . max
elif settings . USE_TZ :
exp = datetime . utcfromtimestamp ( timeout )
else :
exp = datetime . fromtimestamp ( timeout )
exp = exp . replace ( microsecond = 0 )
if num > self . _max_entries :
self . _cull ( db , cursor , now )
pickled = pickle . dumps ( value , pickle . HIGHEST_PROTOCOL )
b64encoded = base64 . b64encode ( pickled )
if six . PY3 :
b64encoded = b64encoded . decode ( 'latin1' )
try :
with transaction . atomic ( using = db ) :
cursor . execute ( "SELECT cache_key, expires FROM %s " "WHERE cache_key = %%s" % table , [ key ] )
result = cursor . fetchone ( )
if result :
current_expires = result [ 1 ]
if ( connections [ db ] . features . needs_datetime_string_cast and not isinstance ( current_expires , datetime ) ) :
current_expires = typecast_timestamp ( str ( current_expires ) )
exp = connections [ db ] . ops . value_to_db_datetime ( exp )
if result and ( mode == 'set' or ( mode == 'add' and current_expires < now ) ) :
cursor . execute ( "UPDATE %s SET value = %%s, expires = %%s " "WHERE cache_key = %%s" % table , [ b64encoded , exp , key ] )
else :
cursor . execute ( "INSERT INTO %s (cache_key, value, expires) " "VALUES (%%s, %%s, %%s)" % table , [ key , b64encoded , exp ] )
except DatabaseError :
return False
else :
return True
def delete ( self , key , version = None ) :
key = self . make_key ( key , version = version )
self . validate_key ( key )
db = router . db_for_write ( self . cache_model_class )
table = connections [ db ] . ops . quote_name ( self . _table )
with connections [ db ] . cursor ( ) as cursor :
cursor . execute ( "DELETE FROM %s WHERE cache_key = %%s" % table , [ key ] )
def has_key ( self , key , version = None ) :
key = self . make_key ( key , version = version )
self . validate_key ( key )
db = router . db_for_read ( self . cache_model_class )
table = connections [ db ] . ops . quote_name ( self . _table )
if settings . USE_TZ :
now = datetime . utcnow ( )
else :
now = datetime . now ( )
now = now . replace ( microsecond = 0 )
with connections [ db ] . cursor ( ) as cursor :
cursor . execute ( "SELECT cache_key FROM %s " "WHERE cache_key = %%s and expires > %%s" % table , [ key , connections [ db ] . ops . value_to_db_datetime ( now ) ] )
return cursor . fetchone ( ) is not None
def _cull ( self , db , cursor , now ) :
if self . _cull_frequency == 0 :
self . clear ( )
else :
now = now . replace ( tzinfo = None )
table = connections [ db ] . ops . quote_name ( self . _table )
cursor . execute ( "DELETE FROM %s WHERE expires < %%s" % table , [ connections [ db ] . ops . value_to_db_datetime ( now ) ] )
cursor . execute ( "SELECT COUNT(*) FROM %s" % table )
num = cursor . fetchone ( ) [ 0 ]
if num > self . _max_entries :
cull_num = num // self . _cull_frequency
cursor . execute ( connections [ db ] . ops . cache_key_culling_sql ( ) % table , [ cull_num ] )
cursor . execute ( "DELETE FROM %s " "WHERE cache_key < %%s" % table , [ cursor . fetchone ( ) [ 0 ] ] )
def clear ( self ) :
db = router . db_for_write ( self . cache_model_class )
table = connections [ db ] . ops . quote_name ( self . _table )
with connections [ db ] . cursor ( ) as cursor :
cursor . execute ( 'DELETE FROM %s' % table )
class CacheClass ( DatabaseCache ) :
pass
from django . core . cache . backends . base import BaseCache , DEFAULT_TIMEOUT
class DummyCache ( BaseCache ) :
def __init__ ( self , host , * args , ** kwargs ) :
BaseCache . __init__ ( self , * args , ** kwargs )
def add ( self , key , value , timeout = DEFAULT_TIMEOUT , version = None ) :
key = self . make_key ( key , version = version )
self . validate_key ( key )
return True
def get ( self , key , default = None , version = None ) :
key = self . make_key ( key , version = version )
self . validate_key ( key )
return default
def set ( self , key , value , timeout = DEFAULT_TIMEOUT , version = None ) :
key = self . make_key ( key , version = version )
self . validate_key ( key )
def delete ( self , key , version = None ) :
key = self . make_key ( key , version = version )
self . validate_key ( key )
def get_many ( self , keys , version = None ) :
return { }
def has_key ( self , key , version = None ) :
key = self . make_key ( key , version = version )
self . validate_key ( key )
return False
def set_many ( self , data , timeout = DEFAULT_TIMEOUT , version = None ) :
pass
def delete_many ( self , keys , version = None ) :
pass
def clear ( self ) :
pass
class CacheClass ( DummyCache ) :
pass
import errno
import glob
import hashlib
import io
import os
import random
import tempfile
import time
import zlib
from django . core . cache . backends . base import BaseCache , DEFAULT_TIMEOUT
from django . core . files . move import file_move_safe
from django . utils . encoding import force_bytes
try :
from django . utils . six . moves import cPickle as pickle
except ImportError :
import pickle
class FileBasedCache ( BaseCache ) :
cache_suffix = '.djcache'
def __init__ ( self , dir , params ) :
super ( FileBasedCache , self ) . __init__ ( params )
self . _dir = os . path . abspath ( dir )
self . _createdir ( )
def add ( self , key , value , timeout = DEFAULT_TIMEOUT , version = None ) :
if self . has_key ( key , version ) :
return False
self . set ( key , value , timeout , version )
return True
def get ( self , key , default = None , version = None ) :
fname = self . _key_to_file ( key , version )
if os . path . exists ( fname ) :
try :
with io . open ( fname , 'rb' ) as f :
if not self . _is_expired ( f ) :
return pickle . loads ( zlib . decompress ( f . read ( ) ) )
except IOError as e :
if e . errno == errno . ENOENT :
pass
return default
def set ( self , key , value , timeout = DEFAULT_TIMEOUT , version = None ) :
self . _createdir ( )
fname = self . _key_to_file ( key , version )
self . _cull ( )
fd , tmp_path = tempfile . mkstemp ( dir = self . _dir )
renamed = False
try :
with io . open ( fd , 'wb' ) as f :
expiry = self . get_backend_timeout ( timeout )
f . write ( pickle . dumps ( expiry , - 1 ) )
f . write ( zlib . compress ( pickle . dumps ( value ) , - 1 ) )
file_move_safe ( tmp_path , fname , allow_overwrite = True )
renamed = True
finally :
if not renamed :
os . remove ( tmp_path )
def delete ( self , key , version = None ) :
self . _delete ( self . _key_to_file ( key , version ) )
def _delete ( self , fname ) :
if not fname . startswith ( self . _dir ) or not os . path . exists ( fname ) :
return
try :
os . remove ( fname )
except OSError as e :
if e . errno != errno . ENOENT :
raise
def has_key ( self , key , version = None ) :
fname = self . _key_to_file ( key , version )
if os . path . exists ( fname ) :
with io . open ( fname , 'rb' ) as f :
return not self . _is_expired ( f )
return False
def _cull ( self ) :
filelist = self . _list_cache_files ( )
num_entries = len ( filelist )
if num_entries < self . _max_entries :
return
if self . _cull_frequency == 0 :
return self . clear ( )
filelist = random . sample ( filelist , int ( num_entries / self . _cull_frequency ) )
for fname in filelist :
self . _delete ( fname )
def _createdir ( self ) :
if not os . path . exists ( self . _dir ) :
try :
os . makedirs ( self . _dir , 0o700 )
except OSError as e :
if e . errno != errno . EEXIST :
raise EnvironmentError ( "Cache directory '%s' does not exist " "and could not be created'" % self . _dir )
def _key_to_file ( self , key , version = None ) :
key = self . make_key ( key , version = version )
self . validate_key ( key )
return os . path . join ( self . _dir , '' . join ( [ hashlib . md5 ( force_bytes ( key ) ) . hexdigest ( ) , self . cache_suffix ] ) )
def clear ( self ) :
if not os . path . exists ( self . _dir ) :
return
for fname in self . _list_cache_files ( ) :
self . _delete ( fname )
def _is_expired ( self , f ) :
exp = pickle . load ( f )
if exp is not None and exp < time . time ( ) :
f . close ( )
self . _delete ( f . name )
return True
return False
def _list_cache_files ( self ) :
if not os . path . exists ( self . _dir ) :
return [ ]
filelist = [ os . path . join ( self . _dir , fname ) for fname in glob . glob1 ( self . _dir , '*%s' % self . cache_suffix ) ]
return filelist
class CacheClass ( FileBasedCache ) :
pass
import time
try :
from django . utils . six . moves import cPickle as pickle
except ImportError :
import pickle
from django . core . cache . backends . base import BaseCache , DEFAULT_TIMEOUT
from django . utils . synch import RWLock
_caches = { }
_expire_info = { }
_locks = { }
class LocMemCache ( BaseCache ) :
def __init__ ( self , name , params ) :
BaseCache . __init__ ( self , params )
self . _cache = _caches . setdefault ( name , { } )
self . _expire_info = _expire_info . setdefault ( name , { } )
self . _lock = _locks . setdefault ( name , RWLock ( ) )
def add ( self , key , value , timeout = DEFAULT_TIMEOUT , version = None ) :
key = self . make_key ( key , version = version )
self . validate_key ( key )
pickled = pickle . dumps ( value , pickle . HIGHEST_PROTOCOL )
with self . _lock . writer ( ) :
if self . _has_expired ( key ) :
self . _set ( key , pickled , timeout )
return True
return False
def get ( self , key , default = None , version = None ) :
key = self . make_key ( key , version = version )
self . validate_key ( key )
pickled = None
with self . _lock . reader ( ) :
if not self . _has_expired ( key ) :
pickled = self . _cache [ key ]
if pickled is not None :
try :
return pickle . loads ( pickled )
except pickle . PickleError :
return default
with self . _lock . writer ( ) :
try :
del self . _cache [ key ]
del self . _expire_info [ key ]
except KeyError :
pass
return default
def _set ( self , key , value , timeout = DEFAULT_TIMEOUT ) :
if len ( self . _cache ) >= self . _max_entries :
self . _cull ( )
self . _cache [ key ] = value
self . _expire_info [ key ] = self . get_backend_timeout ( timeout )
def set ( self , key , value , timeout = DEFAULT_TIMEOUT , version = None ) :
key = self . make_key ( key , version = version )
self . validate_key ( key )
pickled = pickle . dumps ( value , pickle . HIGHEST_PROTOCOL )
with self . _lock . writer ( ) :
self . _set ( key , pickled , timeout )
def incr ( self , key , delta = 1 , version = None ) :
value = self . get ( key , version = version )
if value is None :
raise ValueError ( "Key '%s' not found" % key )
new_value = value + delta
key = self . make_key ( key , version = version )
pickled = pickle . dumps ( new_value , pickle . HIGHEST_PROTOCOL )
with self . _lock . writer ( ) :
self . _cache [ key ] = pickled
return new_value
def has_key ( self , key , version = None ) :
key = self . make_key ( key , version = version )
self . validate_key ( key )
with self . _lock . reader ( ) :
if not self . _has_expired ( key ) :
return True
with self . _lock . writer ( ) :
try :
del self . _cache [ key ]
del self . _expire_info [ key ]
except KeyError :
pass
return False
def _has_expired ( self , key ) :
exp = self . _expire_info . get ( key , - 1 )
if exp is None or exp > time . time ( ) :
return False
return True
def _cull ( self ) :
if self . _cull_frequency == 0 :
self . clear ( )
else :
doomed = [ k for ( i , k ) in enumerate ( self . _cache ) if i % self . _cull_frequency == 0 ]
for k in doomed :
self . _delete ( k )
def _delete ( self , key ) :
try :
del self . _cache [ key ]
except KeyError :
pass
try :
del self . _expire_info [ key ]
except KeyError :
pass
def delete ( self , key , version = None ) :
key = self . make_key ( key , version = version )
self . validate_key ( key )
with self . _lock . writer ( ) :
self . _delete ( key )
def clear ( self ) :
self . _cache . clear ( )
self . _expire_info . clear ( )
class CacheClass ( LocMemCache ) :
pass
import time
import pickle
from django . core . cache . backends . base import BaseCache , DEFAULT_TIMEOUT
from django . utils import six
from django . utils . deprecation import RenameMethodsBase , RemovedInDjango19Warning
from django . utils . encoding import force_str
from django . utils . functional import cached_property
class BaseMemcachedCacheMethods ( RenameMethodsBase ) :
renamed_methods = ( ( '_get_memcache_timeout' , 'get_backend_timeout' , RemovedInDjango19Warning ) , )
class BaseMemcachedCache ( six . with_metaclass ( BaseMemcachedCacheMethods , BaseCache ) ) :
def __init__ ( self , server , params , library , value_not_found_exception ) :
super ( BaseMemcachedCache , self ) . __init__ ( params )
if isinstance ( server , six . string_types ) :
self . _servers = server . split ( ';' )
else :
self . _servers = server
self . LibraryValueNotFoundException = value_not_found_exception
self . _lib = library
self . _options = params . get ( 'OPTIONS' , None )
@ property
def _cache ( self ) :
if getattr ( self , '_client' , None ) is None :
self . _client = self . _lib . Client ( self . _servers )
return self . _client
def get_backend_timeout ( self , timeout = DEFAULT_TIMEOUT ) :
if timeout == DEFAULT_TIMEOUT :
timeout = self . default_timeout
if timeout is None :
return 0
elif int ( timeout ) == 0 :
timeout = - 1
if timeout > 2592000 :
timeout += int ( time . time ( ) )
return int ( timeout )
def make_key ( self , key , version = None ) :
return force_str ( super ( BaseMemcachedCache , self ) . make_key ( key , version ) )
def add ( self , key , value , timeout = DEFAULT_TIMEOUT , version = None ) :
key = self . make_key ( key , version = version )
return self . _cache . add ( key , value , self . get_backend_timeout ( timeout ) )
def get ( self , key , default = None , version = None ) :
key = self . make_key ( key , version = version )
val = self . _cache . get ( key )
if val is None :
return default
return val
def set ( self , key , value , timeout = DEFAULT_TIMEOUT , version = None ) :
key = self . make_key ( key , version = version )
self . _cache . set ( key , value , self . get_backend_timeout ( timeout ) )
def delete ( self , key , version = None ) :
key = self . make_key ( key , version = version )
self . _cache . delete ( key )
def get_many ( self , keys , version = None ) :
new_keys = [ self . make_key ( x , version = version ) for x in keys ]
ret = self . _cache . get_multi ( new_keys )
if ret :
_ = { }
m = dict ( zip ( new_keys , keys ) )
for k , v in ret . items ( ) :
_ [ m [ k ] ] = v
ret = _
return ret
def close ( self , ** kwargs ) :
self . _cache . disconnect_all ( )
def incr ( self , key , delta = 1 , version = None ) :
key = self . make_key ( key , version = version )
if delta < 0 :
return self . _cache . decr ( key , - delta )
try :
val = self . _cache . incr ( key , delta )
except self . LibraryValueNotFoundException :
val = None
if val is None :
raise ValueError ( "Key '%s' not found" % key )
return val
def decr ( self , key , delta = 1 , version = None ) :
key = self . make_key ( key , version = version )
if delta < 0 :
return self . _cache . incr ( key , - delta )
try :
val = self . _cache . decr ( key , delta )
except self . LibraryValueNotFoundException :
val = None
if val is None :
raise ValueError ( "Key '%s' not found" % key )
return val
def set_many ( self , data , timeout = DEFAULT_TIMEOUT , version = None ) :
safe_data = { }
for key , value in data . items ( ) :
key = self . make_key ( key , version = version )
safe_data [ key ] = value
self . _cache . set_multi ( safe_data , self . get_backend_timeout ( timeout ) )
def delete_many ( self , keys , version = None ) :
l = lambda x : self . make_key ( x , version = version )
self . _cache . delete_multi ( map ( l , keys ) )
def clear ( self ) :
self . _cache . flush_all ( )
class MemcachedCache ( BaseMemcachedCache ) :
def __init__ ( self , server , params ) :
import memcache
super ( MemcachedCache , self ) . __init__ ( server , params , library = memcache , value_not_found_exception = ValueError )
@ property
def _cache ( self ) :
if getattr ( self , '_client' , None ) is None :
self . _client = self . _lib . Client ( self . _servers , pickleProtocol = pickle . HIGHEST_PROTOCOL )
return self . _client
class PyLibMCCache ( BaseMemcachedCache ) :
def __init__ ( self , server , params ) :
import pylibmc
super ( PyLibMCCache , self ) . __init__ ( server , params , library = pylibmc , value_not_found_exception = pylibmc . NotFound )
@ cached_property
def _cache ( self ) :
client = self . _lib . Client ( self . _servers )
if self . _options :
client . behaviors = self . _options
return client
from __future__ import unicode_literals
import hashlib
from django . utils . encoding import force_bytes
from django . utils . http import urlquote
TEMPLATE_FRAGMENT_KEY_TEMPLATE = 'template.cache.%s.%s'
def make_template_fragment_key ( fragment_name , vary_on = None ) :
if vary_on is None :
vary_on = ( )
key = ':' . join ( urlquote ( var ) for var in vary_on )
args = hashlib . md5 ( force_bytes ( key ) )
return TEMPLATE_FRAGMENT_KEY_TEMPLATE % ( fragment_name , args . hexdigest ( ) )
from __future__ import unicode_literals
from . messages import ( CheckMessage , Debug , Info , Warning , Error , Critical , DEBUG , INFO , WARNING , ERROR , CRITICAL )
from . registry import register , run_checks , tag_exists , Tags
import django . core . checks . compatibility . django_1_6_0
import django . core . checks . compatibility . django_1_7_0
import django . core . checks . model_checks
__all__ = [ 'CheckMessage' , 'Debug' , 'Info' , 'Warning' , 'Error' , 'Critical' , 'DEBUG' , 'INFO' , 'WARNING' , 'ERROR' , 'CRITICAL' , 'register' , 'run_checks' , 'tag_exists' , 'Tags' , ]
from __future__ import unicode_literals
from django . apps import apps
from . . import Warning , register , Tags
@ register ( Tags . compatibility )
def check_1_6_compatibility ( ** kwargs ) :
errors = [ ]
errors . extend ( _check_test_runner ( ** kwargs ) )
errors . extend ( _check_boolean_field_default_value ( ** kwargs ) )
return errors
def _check_test_runner ( app_configs = None , ** kwargs ) :
from django . conf import settings
weight = 0
if not settings . is_overridden ( 'TEST_RUNNER' ) :
try :
settings . SITE_ID
weight += 2
except AttributeError :
pass
try :
settings . BASE_DIR
except AttributeError :
weight += 2
if settings . is_overridden ( 'TEMPLATE_LOADERS' ) :
weight += 2
if settings . is_overridden ( 'MANAGERS' ) :
weight += 2
if settings . is_overridden ( 'ADMINS' ) :
weight += 1
if 'django.middleware.clickjacking.XFrameOptionsMiddleware' not in set ( settings . MIDDLEWARE_CLASSES ) :
weight += 1
if weight >= 6 :
return [ Warning ( "Some project unittests may not execute as expected." , hint = ( "Django 1.6 introduced a new default test runner. It looks like " "this project was generated using Django 1.5 or earlier. You should " "ensure your tests are all running & behaving as expected. See " "https://docs.djangoproject.com/en/dev/releases/1.6/#new-test-runner " "for more information." ) , obj = None , id = '1_6.W001' , ) ]
else :
return [ ]
def _check_boolean_field_default_value ( app_configs = None , ** kwargs ) :
from django . db import models
problem_fields = [ field for model in apps . get_models ( ** kwargs ) if app_configs is None or model . _meta . app_config in app_configs for field in model . _meta . local_fields if isinstance ( field , models . BooleanField ) and not field . has_default ( ) ]
return [ Warning ( "BooleanField does not have a default value." , hint = ( "Django 1.6 changed the default value of BooleanField from False to None. " "See https://docs.djangoproject.com/en/1.6/ref/models/fields/#booleanfield " "for more information." ) , obj = field , id = '1_6.W002' , ) for field in problem_fields ]
from __future__ import unicode_literals
from . . import Warning , register , Tags
@ register ( Tags . compatibility )
def check_1_7_compatibility ( ** kwargs ) :
errors = [ ]
errors . extend ( _check_middleware_classes ( ** kwargs ) )
return errors
def _check_middleware_classes ( app_configs = None , ** kwargs ) :
from django . conf import settings
if not settings . is_overridden ( 'MIDDLEWARE_CLASSES' ) :
return [ Warning ( "MIDDLEWARE_CLASSES is not set." , hint = ( "Django 1.7 changed the global defaults for the MIDDLEWARE_CLASSES. " "django.contrib.sessions.middleware.SessionMiddleware, " "django.contrib.auth.middleware.AuthenticationMiddleware, and " "django.contrib.messages.middleware.MessageMiddleware were removed from the defaults. " "If your project needs these middleware then you should configure this setting." ) , obj = None , id = '1_7.W001' , ) ]
else :
return [ ]
from __future__ import unicode_literals
from django . utils . encoding import python_2_unicode_compatible , force_str
DEBUG = 10
INFO = 20
WARNING = 30
ERROR = 40
CRITICAL = 50
@ python_2_unicode_compatible
class CheckMessage ( object ) :
def __init__ ( self , level , msg , hint = None , obj = None , id = None ) :
assert isinstance ( level , int ) , "The first argument should be level."
self . level = level
self . msg = msg
self . hint = hint
self . obj = obj
self . id = id
def __eq__ ( self , other ) :
return all ( getattr ( self , attr ) == getattr ( other , attr ) for attr in [ 'level' , 'msg' , 'hint' , 'obj' , 'id' ] )
def __ne__ ( self , other ) :
return not ( self == other )
def __str__ ( self ) :
from django . db import models
if self . obj is None :
obj = "?"
elif isinstance ( self . obj , models . base . ModelBase ) :
model = self . obj
app = model . _meta . app_label
obj = '%s.%s' % ( app , model . _meta . object_name )
else :
obj = force_str ( self . obj )
id = "(%s) " % self . id if self . id else ""
hint = "\n\tHINT: %s" % self . hint if self . hint else ''
return "%s: %s%s%s" % ( obj , id , self . msg , hint )
def __repr__ ( self ) :
return "<%s: level=%r, msg=%r, hint=%r, obj=%r, id=%r>" % ( self . __class__ . __name__ , self . level , self . msg , self . hint , self . obj , self . id )
def is_serious ( self ) :
return self . level >= ERROR
def is_silenced ( self ) :
from django . conf import settings
return self . id in settings . SILENCED_SYSTEM_CHECKS
class Debug ( CheckMessage ) :
def __init__ ( self , * args , ** kwargs ) :
return super ( Debug , self ) . __init__ ( DEBUG , * args , ** kwargs )
class Info ( CheckMessage ) :
def __init__ ( self , * args , ** kwargs ) :
return super ( Info , self ) . __init__ ( INFO , * args , ** kwargs )
class Warning ( CheckMessage ) :
def __init__ ( self , * args , ** kwargs ) :
return super ( Warning , self ) . __init__ ( WARNING , * args , ** kwargs )
class Error ( CheckMessage ) :
def __init__ ( self , * args , ** kwargs ) :
return super ( Error , self ) . __init__ ( ERROR , * args , ** kwargs )
class Critical ( CheckMessage ) :
def __init__ ( self , * args , ** kwargs ) :
return super ( Critical , self ) . __init__ ( CRITICAL , * args , ** kwargs )
from __future__ import unicode_literals
from itertools import chain
import types
from django . apps import apps
from . import Error , Tags , register
@ register ( Tags . models )
def check_all_models ( app_configs = None , ** kwargs ) :
errors = [ model . check ( ** kwargs ) for model in apps . get_models ( ) if app_configs is None or model . _meta . app_config in app_configs ]
return list ( chain ( * errors ) )
@ register ( Tags . models , Tags . signals )
def check_model_signals ( app_configs = None , ** kwargs ) :
from django . db import models
errors = [ ]
for name in dir ( models . signals ) :
obj = getattr ( models . signals , name )
if isinstance ( obj , models . signals . ModelSignal ) :
for reference , receivers in obj . unresolved_references . items ( ) :
for receiver , _ , _ in receivers :
if isinstance ( receiver , types . FunctionType ) :
description = "The '%s' function" % receiver . __name__
else :
description = "An instance of the '%s' class" % receiver . __class__ . __name__
errors . append ( Error ( "%s was connected to the '%s' signal " "with a lazy reference to the '%s' sender, " "which has not been installed." % ( description , name , '.' . join ( reference ) ) , obj = receiver . __module__ , hint = None , id = 'signals.E001' ) )
return errors
from __future__ import unicode_literals
from itertools import chain
from django . utils . itercompat import is_iterable
class Tags ( object ) :
admin = 'admin'
compatibility = 'compatibility'
models = 'models'
signals = 'signals'
class CheckRegistry ( object ) :
def __init__ ( self ) :
self . registered_checks = [ ]
def register ( self , * tags ) :
def inner ( check ) :
check . tags = tags
if check not in self . registered_checks :
self . registered_checks . append ( check )
return check
return inner
def run_checks ( self , app_configs = None , tags = None ) :
errors = [ ]
if tags is not None :
checks = [ check for check in self . registered_checks if hasattr ( check , 'tags' ) and set ( check . tags ) & set ( tags ) ]
else :
checks = self . registered_checks
for check in checks :
new_errors = check ( app_configs = app_configs )
assert is_iterable ( new_errors ) , ( "The function %r did not return a list. All functions registered " "with the checks registry must return a list." % check )
errors . extend ( new_errors )
return errors
def tag_exists ( self , tag ) :
return tag in self . tags_available ( )
def tags_available ( self ) :
return set ( chain ( * [ check . tags for check in self . registered_checks if hasattr ( check , 'tags' ) ] ) )
registry = CheckRegistry ( )
register = registry . register
run_checks = registry . run_checks
tag_exists = registry . tag_exists
from __future__ import unicode_literals
from django . conf import settings
from django . middleware . csrf import get_token
from django . utils import six
from django . utils . encoding import smart_text
from django . utils . functional import lazy
def csrf ( request ) :
def _get_val ( ) :
token = get_token ( request )
if token is None :
return 'NOTPROVIDED'
else :
return smart_text ( token )
_get_val = lazy ( _get_val , six . text_type )
return { 'csrf_token' : _get_val ( ) }
def debug ( request ) :
context_extras = { }
if settings . DEBUG and request . META . get ( 'REMOTE_ADDR' ) in settings . INTERNAL_IPS :
context_extras [ 'debug' ] = True
from django . db import connection
context_extras [ 'sql_queries' ] = connection . queries
return context_extras
def i18n ( request ) :
from django . utils import translation
context_extras = { }
context_extras [ 'LANGUAGES' ] = settings . LANGUAGES
context_extras [ 'LANGUAGE_CODE' ] = translation . get_language ( )
context_extras [ 'LANGUAGE_BIDI' ] = translation . get_language_bidi ( )
return context_extras
def tz ( request ) :
from django . utils import timezone
return { 'TIME_ZONE' : timezone . get_current_timezone_name ( ) }
def static ( request ) :
return { 'STATIC_URL' : settings . STATIC_URL }
def media ( request ) :
return { 'MEDIA_URL' : settings . MEDIA_URL }
def request ( request ) :
return { 'request' : request }
from functools import reduce
import operator
from django . utils import six
from django . utils . encoding import force_text
class DjangoRuntimeWarning ( RuntimeWarning ) :
pass
class AppRegistryNotReady ( Exception ) :
pass
class ObjectDoesNotExist ( Exception ) :
silent_variable_failure = True
class MultipleObjectsReturned ( Exception ) :
pass
class SuspiciousOperation ( Exception ) :
class SuspiciousMultipartForm ( SuspiciousOperation ) :
pass
class SuspiciousFileOperation ( SuspiciousOperation ) :
pass
class DisallowedHost ( SuspiciousOperation ) :
pass
class DisallowedRedirect ( SuspiciousOperation ) :
pass
class PermissionDenied ( Exception ) :
pass
class ViewDoesNotExist ( Exception ) :
pass
class MiddlewareNotUsed ( Exception ) :
pass
class ImproperlyConfigured ( Exception ) :
pass
class FieldError ( Exception ) :
pass
NON_FIELD_ERRORS = '__all__'
class ValidationError ( Exception ) :
def __init__ ( self , message , code = None , params = None ) :
super ( ValidationError , self ) . __init__ ( message , code , params )
if isinstance ( message , ValidationError ) :
if hasattr ( message , 'error_dict' ) :
message = message . error_dict
elif not hasattr ( message , 'message' if six . PY3 else 'code' ) :
message = message . error_list
else :
message , code , params = message . message , message . code , message . params
if isinstance ( message , dict ) :
self . error_dict = { }
for field , messages in message . items ( ) :
if not isinstance ( messages , ValidationError ) :
messages = ValidationError ( messages )
self . error_dict [ field ] = messages . error_list
elif isinstance ( message , list ) :
self . error_list = [ ]
for message in message :
if not isinstance ( message , ValidationError ) :
message = ValidationError ( message )
self . error_list . extend ( message . error_list )
else :
self . message = message
self . code = code
self . params = params
self . error_list = [ self ]
@ property
def message_dict ( self ) :
getattr ( self , 'error_dict' )
return dict ( self )
@ property
def messages ( self ) :
if hasattr ( self , 'error_dict' ) :
return reduce ( operator . add , dict ( self ) . values ( ) )
return list ( self )
def update_error_dict ( self , error_dict ) :
if hasattr ( self , 'error_dict' ) :
for field , error_list in self . error_dict . items ( ) :
error_dict . setdefault ( field , [ ] ) . extend ( error_list )
else :
error_dict . setdefault ( NON_FIELD_ERRORS , [ ] ) . extend ( self . error_list )
return error_dict
def __iter__ ( self ) :
if hasattr ( self , 'error_dict' ) :
for field , errors in self . error_dict . items ( ) :
yield field , list ( ValidationError ( errors ) )
else :
for error in self . error_list :
message = error . message
if error . params :
message %= error . params
yield force_text ( message )
def __str__ ( self ) :
if hasattr ( self , 'error_dict' ) :
return repr ( dict ( self ) )
return repr ( list ( self ) )
def __repr__ ( self ) :
return 'ValidationError(%s)' % self
from django . core . files . base import File
__all__ = [ 'File' ]
from __future__ import unicode_literals
import os
from io import BytesIO , StringIO , UnsupportedOperation
from django . utils . encoding import smart_text
from django . core . files . utils import FileProxyMixin
from django . utils import six
from django . utils . encoding import force_bytes , python_2_unicode_compatible
@ python_2_unicode_compatible
class File ( FileProxyMixin ) :
DEFAULT_CHUNK_SIZE = 64 * 2 ** 10
def __init__ ( self , file , name = None ) :
self . file = file
if name is None :
name = getattr ( file , 'name' , None )
self . name = name
if hasattr ( file , 'mode' ) :
self . mode = file . mode
def __str__ ( self ) :
return smart_text ( self . name or '' )
def __repr__ ( self ) :
return "<%s: %s>" % ( self . __class__ . __name__ , self or "None" )
def __bool__ ( self ) :
return bool ( self . name )
def __nonzero__ ( self ) :
return type ( self ) . __bool__ ( self )
def __len__ ( self ) :
return self . size
def _get_size_from_underlying_file ( self ) :
if hasattr ( self . file , 'size' ) :
return self . file . size
if hasattr ( self . file , 'name' ) :
try :
return os . path . getsize ( self . file . name )
except ( OSError , TypeError ) :
pass
if hasattr ( self . file , 'tell' ) and hasattr ( self . file , 'seek' ) :
pos = self . file . tell ( )
self . file . seek ( 0 , os . SEEK_END )
size = self . file . tell ( )
self . file . seek ( pos )
return size
raise AttributeError ( "Unable to determine the file's size." )
def _get_size ( self ) :
if hasattr ( self , '_size' ) :
return self . _size
self . _size = self . _get_size_from_underlying_file ( )
return self . _size
def _set_size ( self , size ) :
self . _size = size
size = property ( _get_size , _set_size )
def _get_closed ( self ) :
return not self . file or self . file . closed
closed = property ( _get_closed )
def chunks ( self , chunk_size = None ) :
if not chunk_size :
chunk_size = self . DEFAULT_CHUNK_SIZE
try :
self . seek ( 0 )
except ( AttributeError , UnsupportedOperation ) :
pass
while True :
data = self . read ( chunk_size )
if not data :
break
yield data
def multiple_chunks ( self , chunk_size = None ) :
if not chunk_size :
chunk_size = self . DEFAULT_CHUNK_SIZE
return self . size > chunk_size
def __iter__ ( self ) :
buffer_ = None
for chunk in self . chunks ( ) :
chunk_buffer = BytesIO ( chunk )
for line in chunk_buffer :
if buffer_ :
line = buffer_ + line
buffer_ = None
if line [ - 1 : ] in ( b'\n' , b'\r' ) :
yield line
else :
buffer_ = line
if buffer_ is not None :
yield buffer_
def __enter__ ( self ) :
return self
def __exit__ ( self , exc_type , exc_value , tb ) :
self . close ( )
def open ( self , mode = None ) :
if not self . closed :
self . seek ( 0 )
elif self . name and os . path . exists ( self . name ) :
self . file = open ( self . name , mode or self . mode )
else :
raise ValueError ( "The file cannot be reopened." )
def close ( self ) :
self . file . close ( )
@ python_2_unicode_compatible
class ContentFile ( File ) :
def __init__ ( self , content , name = None ) :
if six . PY3 :
stream_class = StringIO if isinstance ( content , six . text_type ) else BytesIO
else :
stream_class = BytesIO
content = force_bytes ( content )
super ( ContentFile , self ) . __init__ ( stream_class ( content ) , name = name )
self . size = len ( content )
def __str__ ( self ) :
return 'Raw content'
def __bool__ ( self ) :
return True
def __nonzero__ ( self ) :
return type ( self ) . __bool__ ( self )
def open ( self , mode = None ) :
self . seek ( 0 )
def close ( self ) :
pass
import zlib
from django . core . files import File
class ImageFile ( File ) :
def _get_width ( self ) :
return self . _get_image_dimensions ( ) [ 0 ]
width = property ( _get_width )
def _get_height ( self ) :
return self . _get_image_dimensions ( ) [ 1 ]
height = property ( _get_height )
def _get_image_dimensions ( self ) :
if not hasattr ( self , '_dimensions_cache' ) :
close = self . closed
self . open ( )
self . _dimensions_cache = get_image_dimensions ( self , close = close )
return self . _dimensions_cache
def get_image_dimensions ( file_or_path , close = False ) :
from PIL import ImageFile as PillowImageFile
p = PillowImageFile . Parser ( )
if hasattr ( file_or_path , 'read' ) :
file = file_or_path
file_pos = file . tell ( )
file . seek ( 0 )
else :
file = open ( file_or_path , 'rb' )
close = True
try :
chunk_size = 1024
while 1 :
data = file . read ( chunk_size )
if not data :
break
try :
p . feed ( data )
except zlib . error as e :
if e . args [ 0 ] . startswith ( "Error -5" ) :
pass
else :
raise
if p . image :
return p . image . size
chunk_size *= 2
return None
finally :
if close :
file . close ( )
else :
file . seek ( file_pos )
import os
__all__ = ( 'LOCK_EX' , 'LOCK_SH' , 'LOCK_NB' , 'lock' , 'unlock' )
def _fd ( f ) :
return f . fileno ( ) if hasattr ( f , 'fileno' ) else f
if os . name == 'nt' :
import msvcrt
from ctypes import ( sizeof , c_ulong , c_void_p , c_int64 , Structure , Union , POINTER , windll , byref )
from ctypes . wintypes import BOOL , DWORD , HANDLE
LOCK_SH = 0
LOCK_NB = 0x1
LOCK_EX = 0x2
if sizeof ( c_ulong ) != sizeof ( c_void_p ) :
ULONG_PTR = c_int64
else :
ULONG_PTR = c_ulong
PVOID = c_void_p
class _OFFSET ( Structure ) :
_fields_ = [ ( 'Offset' , DWORD ) , ( 'OffsetHigh' , DWORD ) ]
class _OFFSET_UNION ( Union ) :
_anonymous_ = [ '_offset' ]
_fields_ = [ ( '_offset' , _OFFSET ) , ( 'Pointer' , PVOID ) ]
class OVERLAPPED ( Structure ) :
_anonymous_ = [ '_offset_union' ]
_fields_ = [ ( 'Internal' , ULONG_PTR ) , ( 'InternalHigh' , ULONG_PTR ) , ( '_offset_union' , _OFFSET_UNION ) , ( 'hEvent' , HANDLE ) ]
LPOVERLAPPED = POINTER ( OVERLAPPED )
LockFileEx = windll . kernel32 . LockFileEx
LockFileEx . restype = BOOL
LockFileEx . argtypes = [ HANDLE , DWORD , DWORD , DWORD , DWORD , LPOVERLAPPED ]
UnlockFileEx = windll . kernel32 . UnlockFileEx
UnlockFileEx . restype = BOOL
UnlockFileEx . argtypes = [ HANDLE , DWORD , DWORD , DWORD , LPOVERLAPPED ]
def lock ( f , flags ) :
hfile = msvcrt . get_osfhandle ( _fd ( f ) )
overlapped = OVERLAPPED ( )
ret = LockFileEx ( hfile , flags , 0 , 0 , 0xFFFF0000 , byref ( overlapped ) )
return bool ( ret )
def unlock ( f ) :
hfile = msvcrt . get_osfhandle ( _fd ( f ) )
overlapped = OVERLAPPED ( )
ret = UnlockFileEx ( hfile , 0 , 0 , 0xFFFF0000 , byref ( overlapped ) )
return bool ( ret )
else :
try :
import fcntl
LOCK_SH = fcntl . LOCK_SH
LOCK_NB = fcntl . LOCK_NB
LOCK_EX = fcntl . LOCK_EX
except ( ImportError , AttributeError ) :
LOCK_EX = LOCK_SH = LOCK_NB = 0
def lock ( f , flags ) :
return False
def unlock ( f ) :
return True
else :
def lock ( f , flags ) :
ret = fcntl . flock ( _fd ( f ) , flags )
return ( ret == 0 )
def unlock ( f ) :
ret = fcntl . flock ( _fd ( f ) , fcntl . LOCK_UN )
return ( ret == 0 )
import os
from django . core . files import locks
try :
from shutil import copystat
except ImportError :
import stat
def copystat ( src , dst ) :
st = os . stat ( src )
mode = stat . S_IMODE ( st . st_mode )
if hasattr ( os , 'utime' ) :
os . utime ( dst , ( st . st_atime , st . st_mtime ) )
if hasattr ( os , 'chmod' ) :
os . chmod ( dst , mode )
__all__ = [ 'file_move_safe' ]
def _samefile ( src , dst ) :
if hasattr ( os . path , 'samefile' ) :
try :
return os . path . samefile ( src , dst )
except OSError :
return False
return ( os . path . normcase ( os . path . abspath ( src ) ) == os . path . normcase ( os . path . abspath ( dst ) ) )
def file_move_safe ( old_file_name , new_file_name , chunk_size = 1024 * 64 , allow_overwrite = False ) :
if _samefile ( old_file_name , new_file_name ) :
return
try :
if not allow_overwrite and os . access ( new_file_name , os . F_OK ) :
raise IOError ( "Destination file %s exists and allow_overwrite is False" % new_file_name )
os . rename ( old_file_name , new_file_name )
return
except OSError :
pass
with open ( old_file_name , 'rb' ) as old_file :
fd = os . open ( new_file_name , ( os . O_WRONLY | os . O_CREAT | getattr ( os , 'O_BINARY' , 0 ) | ( os . O_EXCL if not allow_overwrite else 0 ) ) )
try :
locks . lock ( fd , locks . LOCK_EX )
current_chunk = None
while current_chunk != b'' :
current_chunk = old_file . read ( chunk_size )
os . write ( fd , current_chunk )
finally :
locks . unlock ( fd )
os . close ( fd )
copystat ( old_file_name , new_file_name )
try :
os . remove ( old_file_name )
except OSError as e :
if getattr ( e , 'winerror' , 0 ) != 32 and getattr ( e , 'errno' , 0 ) != 13 :
raise
import os
import errno
import itertools
from datetime import datetime
from django . conf import settings
from django . core . exceptions import SuspiciousFileOperation
from django . core . files import locks , File
from django . core . files . move import file_move_safe
from django . utils . encoding import force_text , filepath_to_uri
from django . utils . functional import LazyObject
from django . utils . module_loading import import_string
from django . utils . six . moves . urllib . parse import urljoin
from django . utils . text import get_valid_filename
from django . utils . _os import safe_join , abspathu
from django . utils . deconstruct import deconstructible
__all__ = ( 'Storage' , 'FileSystemStorage' , 'DefaultStorage' , 'default_storage' )
class Storage ( object ) :
def open ( self , name , mode = 'rb' ) :
return self . _open ( name , mode )
def save ( self , name , content ) :
if name is None :
name = content . name
if not hasattr ( content , 'chunks' ) :
content = File ( content )
name = self . get_available_name ( name )
name = self . _save ( name , content )
return force_text ( name . replace ( '\\' , '/' ) )
def get_valid_name ( self , name ) :
return get_valid_filename ( name )
def get_available_name ( self , name ) :
dir_name , file_name = os . path . split ( name )
file_root , file_ext = os . path . splitext ( file_name )
count = itertools . count ( 1 )
while self . exists ( name ) :
name = os . path . join ( dir_name , "%s_%s%s" % ( file_root , next ( count ) , file_ext ) )
return name
def path ( self , name ) :
raise NotImplementedError ( "This backend doesn't support absolute paths." )
def delete ( self , name ) :
raise NotImplementedError ( 'subclasses of Storage must provide a delete() method' )
def exists ( self , name ) :
raise NotImplementedError ( 'subclasses of Storage must provide an exists() method' )
def listdir ( self , path ) :
raise NotImplementedError ( 'subclasses of Storage must provide a listdir() method' )
def size ( self , name ) :
raise NotImplementedError ( 'subclasses of Storage must provide a size() method' )
def url ( self , name ) :
raise NotImplementedError ( 'subclasses of Storage must provide a url() method' )
def accessed_time ( self , name ) :
raise NotImplementedError ( 'subclasses of Storage must provide an accessed_time() method' )
def created_time ( self , name ) :
raise NotImplementedError ( 'subclasses of Storage must provide a created_time() method' )
def modified_time ( self , name ) :
raise NotImplementedError ( 'subclasses of Storage must provide a modified_time() method' )
@ deconstructible
class FileSystemStorage ( Storage ) :
def __init__ ( self , location = None , base_url = None , file_permissions_mode = None , directory_permissions_mode = None ) :
if location is None :
location = settings . MEDIA_ROOT
self . base_location = location
self . location = abspathu ( self . base_location )
if base_url is None :
base_url = settings . MEDIA_URL
elif not base_url . endswith ( '/' ) :
base_url += '/'
self . base_url = base_url
self . file_permissions_mode = ( file_permissions_mode if file_permissions_mode is not None else settings . FILE_UPLOAD_PERMISSIONS )
self . directory_permissions_mode = ( directory_permissions_mode if directory_permissions_mode is not None else settings . FILE_UPLOAD_DIRECTORY_PERMISSIONS )
def _open ( self , name , mode = 'rb' ) :
return File ( open ( self . path ( name ) , mode ) )
def _save ( self , name , content ) :
full_path = self . path ( name )
directory = os . path . dirname ( full_path )
if not os . path . exists ( directory ) :
try :
if self . directory_permissions_mode is not None :
old_umask = os . umask ( 0 )
try :
os . makedirs ( directory , self . directory_permissions_mode )
finally :
os . umask ( old_umask )
else :
os . makedirs ( directory )
except OSError as e :
if e . errno != errno . EEXIST :
raise
if not os . path . isdir ( directory ) :
raise IOError ( "%s exists and is not a directory." % directory )
while True :
try :
if hasattr ( content , 'temporary_file_path' ) :
file_move_safe ( content . temporary_file_path ( ) , full_path )
else :
flags = ( os . O_WRONLY | os . O_CREAT | os . O_EXCL | getattr ( os , 'O_BINARY' , 0 ) )
fd = os . open ( full_path , flags , 0o666 )
_file = None
try :
locks . lock ( fd , locks . LOCK_EX )
for chunk in content . chunks ( ) :
if _file is None :
mode = 'wb' if isinstance ( chunk , bytes ) else 'wt'
_file = os . fdopen ( fd , mode )
_file . write ( chunk )
finally :
locks . unlock ( fd )
if _file is not None :
_file . close ( )
else :
os . close ( fd )
except OSError as e :
if e . errno == errno . EEXIST :
name = self . get_available_name ( name )
full_path = self . path ( name )
else :
raise
else :
break
if self . file_permissions_mode is not None :
os . chmod ( full_path , self . file_permissions_mode )
return name
def delete ( self , name ) :
assert name , "The name argument is not allowed to be empty."
name = self . path ( name )
if os . path . exists ( name ) :
try :
os . remove ( name )
except OSError as e :
if e . errno != errno . ENOENT :
raise
def exists ( self , name ) :
return os . path . exists ( self . path ( name ) )
def listdir ( self , path ) :
path = self . path ( path )
directories , files = [ ] , [ ]
for entry in os . listdir ( path ) :
if os . path . isdir ( os . path . join ( path , entry ) ) :
directories . append ( entry )
else :
files . append ( entry )
return directories , files
def path ( self , name ) :
try :
path = safe_join ( self . location , name )
except ValueError :
raise SuspiciousFileOperation ( "Attempted access to '%s' denied." % name )
return os . path . normpath ( path )
def size ( self , name ) :
return os . path . getsize ( self . path ( name ) )
def url ( self , name ) :
if self . base_url is None :
raise ValueError ( "This file is not accessible via a URL." )
return urljoin ( self . base_url , filepath_to_uri ( name ) )
def accessed_time ( self , name ) :
return datetime . fromtimestamp ( os . path . getatime ( self . path ( name ) ) )
def created_time ( self , name ) :
return datetime . fromtimestamp ( os . path . getctime ( self . path ( name ) ) )
def modified_time ( self , name ) :
return datetime . fromtimestamp ( os . path . getmtime ( self . path ( name ) ) )
def get_storage_class ( import_path = None ) :
return import_string ( import_path or settings . DEFAULT_FILE_STORAGE )
class DefaultStorage ( LazyObject ) :
def _setup ( self ) :
self . _wrapped = get_storage_class ( ) ( )
default_storage = DefaultStorage ( )
import os
import tempfile
from django . core . files . utils import FileProxyMixin
__all__ = ( 'NamedTemporaryFile' , 'gettempdir' , )
if os . name == 'nt' :
class TemporaryFile ( FileProxyMixin ) :
def __init__ ( self , mode = 'w+b' , bufsize = - 1 , suffix = '' , prefix = '' , dir = None ) :
fd , name = tempfile . mkstemp ( suffix = suffix , prefix = prefix , dir = dir )
self . name = name
self . file = os . fdopen ( fd , mode , bufsize )
self . close_called = False
unlink = os . unlink
def close ( self ) :
if not self . close_called :
self . close_called = True
try :
self . file . close ( )
except ( OSError , IOError ) :
pass
try :
self . unlink ( self . name )
except ( OSError ) :
pass
@ property
def closed ( self ) :
return self . file . closed
def __del__ ( self ) :
self . close ( )
def __enter__ ( self ) :
self . file . __enter__ ( )
return self
def __exit__ ( self , exc , value , tb ) :
self . file . __exit__ ( exc , value , tb )
NamedTemporaryFile = TemporaryFile
else :
NamedTemporaryFile = tempfile . NamedTemporaryFile
gettempdir = tempfile . gettempdir
import errno
import os
from io import BytesIO
from django . conf import settings
from django . core . files . base import File
from django . core . files import temp as tempfile
from django . utils . encoding import force_str
__all__ = ( 'UploadedFile' , 'TemporaryUploadedFile' , 'InMemoryUploadedFile' , 'SimpleUploadedFile' )
class UploadedFile ( File ) :
DEFAULT_CHUNK_SIZE = 64 * 2 ** 10
def __init__ ( self , file = None , name = None , content_type = None , size = None , charset = None , content_type_extra = None ) :
super ( UploadedFile , self ) . __init__ ( file , name )
self . size = size
self . content_type = content_type
self . charset = charset
self . content_type_extra = content_type_extra
def __repr__ ( self ) :
return force_str ( "<%s: %s (%s)>" % ( self . __class__ . __name__ , self . name , self . content_type ) )
def _get_name ( self ) :
return self . _name
def _set_name ( self , name ) :
if name is not None :
name = os . path . basename ( name )
if len ( name ) > 255 :
name , ext = os . path . splitext ( name )
ext = ext [ : 255 ]
name = name [ : 255 - len ( ext ) ] + ext
self . _name = name
name = property ( _get_name , _set_name )
class TemporaryUploadedFile ( UploadedFile ) :
def __init__ ( self , name , content_type , size , charset , content_type_extra = None ) :
if settings . FILE_UPLOAD_TEMP_DIR :
file = tempfile . NamedTemporaryFile ( suffix = '.upload' , dir = settings . FILE_UPLOAD_TEMP_DIR )
else :
file = tempfile . NamedTemporaryFile ( suffix = '.upload' )
super ( TemporaryUploadedFile , self ) . __init__ ( file , name , content_type , size , charset , content_type_extra )
def temporary_file_path ( self ) :
return self . file . name
def close ( self ) :
try :
return self . file . close ( )
except OSError as e :
if e . errno != errno . ENOENT :
raise
class InMemoryUploadedFile ( UploadedFile ) :
def __init__ ( self , file , field_name , name , content_type , size , charset , content_type_extra = None ) :
super ( InMemoryUploadedFile , self ) . __init__ ( file , name , content_type , size , charset , content_type_extra )
self . field_name = field_name
def open ( self , mode = None ) :
self . file . seek ( 0 )
def chunks ( self , chunk_size = None ) :
self . file . seek ( 0 )
yield self . read ( )
def multiple_chunks ( self , chunk_size = None ) :
return False
class SimpleUploadedFile ( InMemoryUploadedFile ) :
def __init__ ( self , name , content , content_type = 'text/plain' ) :
content = content or b''
super ( SimpleUploadedFile , self ) . __init__ ( BytesIO ( content ) , None , name , content_type , len ( content ) , None , None )
@ classmethod
def from_dict ( cls , file_dict ) :
return cls ( file_dict [ 'filename' ] , file_dict [ 'content' ] , file_dict . get ( 'content-type' , 'text/plain' ) )
from __future__ import unicode_literals
from io import BytesIO
from django . conf import settings
from django . core . files . uploadedfile import TemporaryUploadedFile , InMemoryUploadedFile
from django . utils . encoding import python_2_unicode_compatible
from django . utils . module_loading import import_string
__all__ = [ 'UploadFileException' , 'StopUpload' , 'SkipFile' , 'FileUploadHandler' , 'TemporaryFileUploadHandler' , 'MemoryFileUploadHandler' , 'load_handler' , 'StopFutureHandlers' ]
class UploadFileException ( Exception ) :
pass
@ python_2_unicode_compatible
class StopUpload ( UploadFileException ) :
def __init__ ( self , connection_reset = False ) :
self . connection_reset = connection_reset
def __str__ ( self ) :
if self . connection_reset :
return 'StopUpload: Halt current upload.'
else :
return 'StopUpload: Consume request data, then halt.'
class SkipFile ( UploadFileException ) :
pass
class StopFutureHandlers ( UploadFileException ) :
pass
class FileUploadHandler ( object ) :
chunk_size = 64 * 2 ** 10
def __init__ ( self , request = None ) :
self . file_name = None
self . content_type = None
self . content_length = None
self . charset = None
self . content_type_extra = None
self . request = request
def handle_raw_input ( self , input_data , META , content_length , boundary , encoding = None ) :
pass
def new_file ( self , field_name , file_name , content_type , content_length , charset = None , content_type_extra = None ) :
self . field_name = field_name
self . file_name = file_name
self . content_type = content_type
self . content_length = content_length
self . charset = charset
self . content_type_extra = content_type_extra
def receive_data_chunk ( self , raw_data , start ) :
raise NotImplementedError ( 'subclasses of FileUploadHandler must provide a receive_data_chunk() method' )
def file_complete ( self , file_size ) :
raise NotImplementedError ( 'subclasses of FileUploadHandler must provide a file_complete() method' )
def upload_complete ( self ) :
pass
class TemporaryFileUploadHandler ( FileUploadHandler ) :
def __init__ ( self , * args , ** kwargs ) :
super ( TemporaryFileUploadHandler , self ) . __init__ ( * args , ** kwargs )
def new_file ( self , file_name , * args , ** kwargs ) :
super ( TemporaryFileUploadHandler , self ) . new_file ( file_name , * args , ** kwargs )
self . file = TemporaryUploadedFile ( self . file_name , self . content_type , 0 , self . charset , self . content_type_extra )
def receive_data_chunk ( self , raw_data , start ) :
self . file . write ( raw_data )
def file_complete ( self , file_size ) :
self . file . seek ( 0 )
self . file . size = file_size
return self . file
class MemoryFileUploadHandler ( FileUploadHandler ) :
def handle_raw_input ( self , input_data , META , content_length , boundary , encoding = None ) :
if content_length > settings . FILE_UPLOAD_MAX_MEMORY_SIZE :
self . activated = False
else :
self . activated = True
def new_file ( self , * args , ** kwargs ) :
super ( MemoryFileUploadHandler , self ) . new_file ( * args , ** kwargs )
if self . activated :
self . file = BytesIO ( )
raise StopFutureHandlers ( )
def receive_data_chunk ( self , raw_data , start ) :
if self . activated :
self . file . write ( raw_data )
else :
return raw_data
def file_complete ( self , file_size ) :
if not self . activated :
return
self . file . seek ( 0 )
return InMemoryUploadedFile ( file = self . file , field_name = self . field_name , name = self . file_name , content_type = self . content_type , size = file_size , charset = self . charset , content_type_extra = self . content_type_extra )
def load_handler ( path , * args , ** kwargs ) :
return import_string ( path ) ( * args , ** kwargs )
class FileProxyMixin ( object ) :
encoding = property ( lambda self : self . file . encoding )
fileno = property ( lambda self : self . file . fileno )
flush = property ( lambda self : self . file . flush )
isatty = property ( lambda self : self . file . isatty )
newlines = property ( lambda self : self . file . newlines )
read = property ( lambda self : self . file . read )
readinto = property ( lambda self : self . file . readinto )
readline = property ( lambda self : self . file . readline )
readlines = property ( lambda self : self . file . readlines )
seek = property ( lambda self : self . file . seek )
softspace = property ( lambda self : self . file . softspace )
tell = property ( lambda self : self . file . tell )
truncate = property ( lambda self : self . file . truncate )
write = property ( lambda self : self . file . write )
writelines = property ( lambda self : self . file . writelines )
xreadlines = property ( lambda self : self . file . xreadlines )
def __iter__ ( self ) :
return iter ( self . file )
from __future__ import unicode_literals
import logging
import sys
import types
from django import http
from django . conf import settings
from django . core import urlresolvers
from django . core import signals
from django . core . exceptions import MiddlewareNotUsed , PermissionDenied , SuspiciousOperation
from django . db import connections , transaction
from django . utils . encoding import force_text
from django . utils . module_loading import import_string
from django . utils import six
from django . views import debug
logger = logging . getLogger ( 'django.request' )
class BaseHandler ( object ) :
response_fixes = [ http . fix_location_header , http . conditional_content_removal , ]
def __init__ ( self ) :
self . _request_middleware = self . _view_middleware = self . _template_response_middleware = self . _response_middleware = self . _exception_middleware = None
def load_middleware ( self ) :
self . _view_middleware = [ ]
self . _template_response_middleware = [ ]
self . _response_middleware = [ ]
self . _exception_middleware = [ ]
request_middleware = [ ]
for middleware_path in settings . MIDDLEWARE_CLASSES :
mw_class = import_string ( middleware_path )
try :
mw_instance = mw_class ( )
except MiddlewareNotUsed :
continue
if hasattr ( mw_instance , 'process_request' ) :
request_middleware . append ( mw_instance . process_request )
if hasattr ( mw_instance , 'process_view' ) :
self . _view_middleware . append ( mw_instance . process_view )
if hasattr ( mw_instance , 'process_template_response' ) :
self . _template_response_middleware . insert ( 0 , mw_instance . process_template_response )
if hasattr ( mw_instance , 'process_response' ) :
self . _response_middleware . insert ( 0 , mw_instance . process_response )
if hasattr ( mw_instance , 'process_exception' ) :
self . _exception_middleware . insert ( 0 , mw_instance . process_exception )
self . _request_middleware = request_middleware
def make_view_atomic ( self , view ) :
non_atomic_requests = getattr ( view , '_non_atomic_requests' , set ( ) )
for db in connections . all ( ) :
if ( db . settings_dict [ 'ATOMIC_REQUESTS' ] and db . alias not in non_atomic_requests ) :
view = transaction . atomic ( using = db . alias ) ( view )
return view
def get_exception_response ( self , request , resolver , status_code ) :
try :
callback , param_dict = resolver . resolve_error_handler ( status_code )
response = callback ( request , ** param_dict )
except :
signals . got_request_exception . send ( sender = self . __class__ , request = request )
response = self . handle_uncaught_exception ( request , resolver , sys . exc_info ( ) )
return response
def get_response ( self , request ) :
urlconf = settings . ROOT_URLCONF
urlresolvers . set_urlconf ( urlconf )
resolver = urlresolvers . RegexURLResolver ( r'^/' , urlconf )
try :
response = None
for middleware_method in self . _request_middleware :
response = middleware_method ( request )
if response :
break
if response is None :
if hasattr ( request , 'urlconf' ) :
urlconf = request . urlconf
urlresolvers . set_urlconf ( urlconf )
resolver = urlresolvers . RegexURLResolver ( r'^/' , urlconf )
resolver_match = resolver . resolve ( request . path_info )
callback , callback_args , callback_kwargs = resolver_match
request . resolver_match = resolver_match
for middleware_method in self . _view_middleware :
response = middleware_method ( request , callback , callback_args , callback_kwargs )
if response :
break
if response is None :
wrapped_callback = self . make_view_atomic ( callback )
try :
response = wrapped_callback ( request , * callback_args , ** callback_kwargs )
except Exception as e :
for middleware_method in self . _exception_middleware :
response = middleware_method ( request , e )
if response :
break
if response is None :
raise
if response is None :
if isinstance ( callback , types . FunctionType ) :
view_name = callback . __name__
else :
view_name = callback . __class__ . __name__ + '.__call__'
raise ValueError ( "The view %s.%s didn't return an HttpResponse object. It returned None instead." % ( callback . __module__ , view_name ) )
if hasattr ( response , 'render' ) and callable ( response . render ) :
for middleware_method in self . _template_response_middleware :
response = middleware_method ( request , response )
if response is None :
raise ValueError ( "%s.process_template_response didn't return an " "HttpResponse object. It returned None instead." % ( middleware_method . __self__ . __class__ . __name__ ) )
response = response . render ( )
except http . Http404 as e :
logger . warning ( 'Not Found: %s' , request . path , extra = { 'status_code' : 404 , 'request' : request } )
if settings . DEBUG :
response = debug . technical_404_response ( request , e )
else :
response = self . get_exception_response ( request , resolver , 404 )
except PermissionDenied :
logger . warning ( 'Forbidden (Permission denied): %s' , request . path , extra = { 'status_code' : 403 , 'request' : request } )
response = self . get_exception_response ( request , resolver , 403 )
except SuspiciousOperation as e :
security_logger = logging . getLogger ( 'django.security.%s' % e . __class__ . __name__ )
security_logger . error ( force_text ( e ) , extra = { 'status_code' : 400 , 'request' : request } )
if settings . DEBUG :
return debug . technical_500_response ( request , * sys . exc_info ( ) , status_code = 400 )
response = self . get_exception_response ( request , resolver , 400 )
except SystemExit :
raise
except :
signals . got_request_exception . send ( sender = self . __class__ , request = request )
response = self . handle_uncaught_exception ( request , resolver , sys . exc_info ( ) )
try :
for middleware_method in self . _response_middleware :
response = middleware_method ( request , response )
if response is None :
raise ValueError ( "%s.process_response didn't return an " "HttpResponse object. It returned None instead." % ( middleware_method . __self__ . __class__ . __name__ ) )
response = self . apply_response_fixes ( request , response )
except :
signals . got_request_exception . send ( sender = self . __class__ , request = request )
response = self . handle_uncaught_exception ( request , resolver , sys . exc_info ( ) )
response . _closable_objects . append ( request )
return response
def handle_uncaught_exception ( self , request , resolver , exc_info ) :
if settings . DEBUG_PROPAGATE_EXCEPTIONS :
raise
logger . error ( 'Internal Server Error: %s' , request . path , exc_info = exc_info , extra = { 'status_code' : 500 , 'request' : request } )
if settings . DEBUG :
return debug . technical_500_response ( request , * exc_info )
if resolver . urlconf_module is None :
six . reraise ( * exc_info )
callback , param_dict = resolver . resolve_error_handler ( 500 )
return callback ( request , ** param_dict )
def apply_response_fixes ( self , request , response ) :
for func in self . response_fixes :
response = func ( request , response )
return response
from __future__ import unicode_literals
import cgi
import codecs
import logging
import sys
from io import BytesIO
from threading import Lock
import warnings
from django import http
from django . conf import settings
from django . core import signals
from django . core . handlers import base
from django . core . urlresolvers import set_script_prefix
from django . utils import datastructures
from django . utils . deprecation import RemovedInDjango19Warning
from django . utils . encoding import force_str , force_text
from django . utils . functional import cached_property
from django . utils import six
from django . http . response import REASON_PHRASES as STATUS_CODE_TEXT
logger = logging . getLogger ( 'django.request' )
ISO_8859_1 , UTF_8 = str ( 'iso-8859-1' ) , str ( 'utf-8' )
class LimitedStream ( object ) :
def __init__ ( self , stream , limit , buf_size = 64 * 1024 * 1024 ) :
self . stream = stream
self . remaining = limit
self . buffer = b''
self . buf_size = buf_size
def _read_limited ( self , size = None ) :
if size is None or size > self . remaining :
size = self . remaining
if size == 0 :
return b''
result = self . stream . read ( size )
self . remaining -= len ( result )
return result
def read ( self , size = None ) :
if size is None :
result = self . buffer + self . _read_limited ( )
self . buffer = b''
elif size < len ( self . buffer ) :
result = self . buffer [ : size ]
self . buffer = self . buffer [ size : ]
else :
result = self . buffer + self . _read_limited ( size - len ( self . buffer ) )
self . buffer = b''
return result
def readline ( self , size = None ) :
while b'\n' not in self . buffer and ( size is None or len ( self . buffer ) < size ) :
if size :
chunk = self . _read_limited ( size - len ( self . buffer ) )
else :
chunk = self . _read_limited ( )
if not chunk :
break
self . buffer += chunk
sio = BytesIO ( self . buffer )
if size :
line = sio . readline ( size )
else :
line = sio . readline ( )
self . buffer = sio . read ( )
return line
class WSGIRequest ( http . HttpRequest ) :
def __init__ ( self , environ ) :
script_name = get_script_name ( environ )
path_info = get_path_info ( environ )
if not path_info :
path_info = '/'
self . environ = environ
self . path_info = path_info
self . path = '%s/%s' % ( script_name . rstrip ( '/' ) , path_info . replace ( '/' , '' , 1 ) )
self . META = environ
self . META [ 'PATH_INFO' ] = path_info
self . META [ 'SCRIPT_NAME' ] = script_name
self . method = environ [ 'REQUEST_METHOD' ] . upper ( )
_ , content_params = cgi . parse_header ( environ . get ( 'CONTENT_TYPE' , '' ) )
if 'charset' in content_params :
try :
codecs . lookup ( content_params [ 'charset' ] )
except LookupError :
pass
else :
self . encoding = content_params [ 'charset' ]
self . _post_parse_error = False
try :
content_length = int ( environ . get ( 'CONTENT_LENGTH' ) )
except ( ValueError , TypeError ) :
content_length = 0
self . _stream = LimitedStream ( self . environ [ 'wsgi.input' ] , content_length )
self . _read_started = False
self . resolver_match = None
def _get_scheme ( self ) :
return self . environ . get ( 'wsgi.url_scheme' )
def _get_request ( self ) :
warnings . warn ( '`request.REQUEST` is deprecated, use `request.GET` or ' '`request.POST` instead.' , RemovedInDjango19Warning , 2 )
if not hasattr ( self , '_request' ) :
self . _request = datastructures . MergeDict ( self . POST , self . GET )
return self . _request
@ cached_property
def GET ( self ) :
raw_query_string = get_bytes_from_wsgi ( self . environ , 'QUERY_STRING' , '' )
return http . QueryDict ( raw_query_string , encoding = self . _encoding )
def _get_post ( self ) :
if not hasattr ( self , '_post' ) :
self . _load_post_and_files ( )
return self . _post
def _set_post ( self , post ) :
self . _post = post
@ cached_property
def COOKIES ( self ) :
raw_cookie = get_str_from_wsgi ( self . environ , 'HTTP_COOKIE' , '' )
return http . parse_cookie ( raw_cookie )
def _get_files ( self ) :
if not hasattr ( self , '_files' ) :
self . _load_post_and_files ( )
return self . _files
POST = property ( _get_post , _set_post )
FILES = property ( _get_files )
REQUEST = property ( _get_request )
class WSGIHandler ( base . BaseHandler ) :
initLock = Lock ( )
request_class = WSGIRequest
def __call__ ( self , environ , start_response ) :
if self . _request_middleware is None :
with self . initLock :
try :
if self . _request_middleware is None :
self . load_middleware ( )
except :
self . _request_middleware = None
raise
set_script_prefix ( get_script_name ( environ ) )
signals . request_started . send ( sender = self . __class__ )
try :
request = self . request_class ( environ )
except UnicodeDecodeError :
logger . warning ( 'Bad Request (UnicodeDecodeError)' , exc_info = sys . exc_info ( ) , extra = { 'status_code' : 400 , } )
response = http . HttpResponseBadRequest ( )
else :
response = self . get_response ( request )
response . _handler_class = self . __class__
status = '%s %s' % ( response . status_code , response . reason_phrase )
response_headers = [ ( str ( k ) , str ( v ) ) for k , v in response . items ( ) ]
for c in response . cookies . values ( ) :
response_headers . append ( ( str ( 'Set-Cookie' ) , str ( c . output ( header = '' ) ) ) )
start_response ( force_str ( status ) , response_headers )
return response
def get_path_info ( environ ) :
path_info = get_bytes_from_wsgi ( environ , 'PATH_INFO' , '/' )
return path_info . decode ( UTF_8 )
def get_script_name ( environ ) :
if settings . FORCE_SCRIPT_NAME is not None :
return force_text ( settings . FORCE_SCRIPT_NAME )
script_url = get_bytes_from_wsgi ( environ , 'SCRIPT_URL' , '' )
if not script_url :
script_url = get_bytes_from_wsgi ( environ , 'REDIRECT_URL' , '' )
if script_url :
path_info = get_bytes_from_wsgi ( environ , 'PATH_INFO' , '' )
script_name = script_url [ : - len ( path_info ) ]
else :
script_name = get_bytes_from_wsgi ( environ , 'SCRIPT_NAME' , '' )
return script_name . decode ( UTF_8 )
def get_bytes_from_wsgi ( environ , key , default ) :
value = environ . get ( str ( key ) , str ( default ) )
return value if six . PY2 else value . encode ( ISO_8859_1 )
def get_str_from_wsgi ( environ , key , default ) :
value = environ . get ( str ( key ) , str ( default ) )
return value if six . PY2 else value . encode ( ISO_8859_1 ) . decode ( UTF_8 )
from __future__ import unicode_literals
from django . conf import settings
from django . utils . module_loading import import_string
from django . core . mail . utils import CachedDnsName , DNS_NAME
from django . core . mail . message import ( EmailMessage , EmailMultiAlternatives , SafeMIMEText , SafeMIMEMultipart , DEFAULT_ATTACHMENT_MIME_TYPE , make_msgid , BadHeaderError , forbid_multi_line_headers )
__all__ = [ 'CachedDnsName' , 'DNS_NAME' , 'EmailMessage' , 'EmailMultiAlternatives' , 'SafeMIMEText' , 'SafeMIMEMultipart' , 'DEFAULT_ATTACHMENT_MIME_TYPE' , 'make_msgid' , 'BadHeaderError' , 'forbid_multi_line_headers' , 'get_connection' , 'send_mail' , 'send_mass_mail' , 'mail_admins' , 'mail_managers' , ]
def get_connection ( backend = None , fail_silently = False , ** kwds ) :
klass = import_string ( backend or settings . EMAIL_BACKEND )
return klass ( fail_silently = fail_silently , ** kwds )
def send_mail ( subject , message , from_email , recipient_list , fail_silently = False , auth_user = None , auth_password = None , connection = None , html_message = None ) :
connection = connection or get_connection ( username = auth_user , password = auth_password , fail_silently = fail_silently )
mail = EmailMultiAlternatives ( subject , message , from_email , recipient_list , connection = connection )
if html_message :
mail . attach_alternative ( html_message , 'text/html' )
return mail . send ( )
def send_mass_mail ( datatuple , fail_silently = False , auth_user = None , auth_password = None , connection = None ) :
connection = connection or get_connection ( username = auth_user , password = auth_password , fail_silently = fail_silently )
messages = [ EmailMessage ( subject , message , sender , recipient , connection = connection ) for subject , message , sender , recipient in datatuple ]
return connection . send_messages ( messages )
def mail_admins ( subject , message , fail_silently = False , connection = None , html_message = None ) :
if not settings . ADMINS :
return
mail = EmailMultiAlternatives ( '%s%s' % ( settings . EMAIL_SUBJECT_PREFIX , subject ) , message , settings . SERVER_EMAIL , [ a [ 1 ] for a in settings . ADMINS ] , connection = connection )
if html_message :
mail . attach_alternative ( html_message , 'text/html' )
mail . send ( fail_silently = fail_silently )
def mail_managers ( subject , message , fail_silently = False , connection = None , html_message = None ) :
if not settings . MANAGERS :
return
mail = EmailMultiAlternatives ( '%s%s' % ( settings . EMAIL_SUBJECT_PREFIX , subject ) , message , settings . SERVER_EMAIL , [ a [ 1 ] for a in settings . MANAGERS ] , connection = connection )
if html_message :
mail . attach_alternative ( html_message , 'text/html' )
mail . send ( fail_silently = fail_silently )
class BaseEmailBackend ( object ) :
def __init__ ( self , fail_silently = False , ** kwargs ) :
self . fail_silently = fail_silently
def open ( self ) :
pass
def close ( self ) :
pass
def __enter__ ( self ) :
self . open ( )
return self
def __exit__ ( self , exc_type , exc_value , traceback ) :
self . close ( )
def send_messages ( self , email_messages ) :
raise NotImplementedError ( 'subclasses of BaseEmailBackend must override send_messages() method' )
import sys
import threading
from django . core . mail . backends . base import BaseEmailBackend
from django . utils import six
class EmailBackend ( BaseEmailBackend ) :
def __init__ ( self , * args , ** kwargs ) :
self . stream = kwargs . pop ( 'stream' , sys . stdout )
self . _lock = threading . RLock ( )
super ( EmailBackend , self ) . __init__ ( * args , ** kwargs )
def write_message ( self , message ) :
msg = message . message ( )
msg_data = msg . as_bytes ( )
if six . PY3 :
charset = msg . get_charset ( ) . get_output_charset ( ) if msg . get_charset ( ) else 'utf-8'
msg_data = msg_data . decode ( charset )
self . stream . write ( '%s\n' % msg_data )
self . stream . write ( '-' * 79 )
self . stream . write ( '\n' )
def send_messages ( self , email_messages ) :
if not email_messages :
return
msg_count = 0
with self . _lock :
try :
stream_created = self . open ( )
for message in email_messages :
self . write_message ( message )
self . stream . flush ( )
msg_count += 1
if stream_created :
self . close ( )
except Exception :
if not self . fail_silently :
raise
return msg_count
from django . core . mail . backends . base import BaseEmailBackend
class EmailBackend ( BaseEmailBackend ) :
def send_messages ( self , email_messages ) :
return len ( list ( email_messages ) )
import datetime
import os
from django . conf import settings
from django . core . exceptions import ImproperlyConfigured
from django . core . mail . backends . console import EmailBackend as ConsoleEmailBackend
from django . utils import six
class EmailBackend ( ConsoleEmailBackend ) :
def __init__ ( self , * args , ** kwargs ) :
self . _fname = None
if 'file_path' in kwargs :
self . file_path = kwargs . pop ( 'file_path' )
else :
self . file_path = getattr ( settings , 'EMAIL_FILE_PATH' , None )
if not isinstance ( self . file_path , six . string_types ) :
raise ImproperlyConfigured ( 'Path for saving emails is invalid: %r' % self . file_path )
self . file_path = os . path . abspath ( self . file_path )
if os . path . exists ( self . file_path ) and not os . path . isdir ( self . file_path ) :
raise ImproperlyConfigured ( 'Path for saving email messages exists, but is not a directory: %s' % self . file_path )
elif not os . path . exists ( self . file_path ) :
try :
os . makedirs ( self . file_path )
except OSError as err :
raise ImproperlyConfigured ( 'Could not create directory for saving email messages: %s (%s)' % ( self . file_path , err ) )
if not os . access ( self . file_path , os . W_OK ) :
raise ImproperlyConfigured ( 'Could not write to directory: %s' % self . file_path )
kwargs [ 'stream' ] = None
super ( EmailBackend , self ) . __init__ ( * args , ** kwargs )
def write_message ( self , message ) :
self . stream . write ( message . message ( ) . as_bytes ( ) + b'\n' )
self . stream . write ( b'-' * 79 )
self . stream . write ( b'\n' )
def _get_filename ( self ) :
if self . _fname is None :
timestamp = datetime . datetime . now ( ) . strftime ( "%Y%m%d-%H%M%S" )
fname = "%s-%s.log" % ( timestamp , abs ( id ( self ) ) )
self . _fname = os . path . join ( self . file_path , fname )
return self . _fname
def open ( self ) :
if self . stream is None :
self . stream = open ( self . _get_filename ( ) , 'ab' )
return True
return False
def close ( self ) :
try :
if self . stream is not None :
self . stream . close ( )
finally :
self . stream = None
from django . core import mail
from django . core . mail . backends . base import BaseEmailBackend
class EmailBackend ( BaseEmailBackend ) :
def __init__ ( self , * args , ** kwargs ) :
super ( EmailBackend , self ) . __init__ ( * args , ** kwargs )
if not hasattr ( mail , 'outbox' ) :
mail . outbox = [ ]
def send_messages ( self , messages ) :
msg_count = 0
for message in messages :
message . message ( )
msg_count += 1
mail . outbox . extend ( messages )
return msg_count
import smtplib
import ssl
import threading
from django . conf import settings
from django . core . mail . backends . base import BaseEmailBackend
from django . core . mail . utils import DNS_NAME
from django . core . mail . message import sanitize_address
class EmailBackend ( BaseEmailBackend ) :
def __init__ ( self , host = None , port = None , username = None , password = None , use_tls = None , fail_silently = False , use_ssl = None , timeout = None , ** kwargs ) :
super ( EmailBackend , self ) . __init__ ( fail_silently = fail_silently )
self . host = host or settings . EMAIL_HOST
self . port = port or settings . EMAIL_PORT
self . username = settings . EMAIL_HOST_USER if username is None else username
self . password = settings . EMAIL_HOST_PASSWORD if password is None else password
self . use_tls = settings . EMAIL_USE_TLS if use_tls is None else use_tls
self . use_ssl = settings . EMAIL_USE_SSL if use_ssl is None else use_ssl
self . timeout = timeout
if self . use_ssl and self . use_tls :
raise ValueError ( "EMAIL_USE_TLS/EMAIL_USE_SSL are mutually exclusive, so only set " "one of those settings to True." )
self . connection = None
self . _lock = threading . RLock ( )
def open ( self ) :
if self . connection :
return False
connection_class = smtplib . SMTP_SSL if self . use_ssl else smtplib . SMTP
connection_params = { 'local_hostname' : DNS_NAME . get_fqdn ( ) }
if self . timeout is not None :
connection_params [ 'timeout' ] = self . timeout
try :
self . connection = connection_class ( self . host , self . port , ** connection_params )
if not self . use_ssl and self . use_tls :
self . connection . ehlo ( )
self . connection . starttls ( )
self . connection . ehlo ( )
if self . username and self . password :
self . connection . login ( self . username , self . password )
return True
except smtplib . SMTPException :
if not self . fail_silently :
raise
def close ( self ) :
if self . connection is None :
return
try :
try :
self . connection . quit ( )
except ( ssl . SSLError , smtplib . SMTPServerDisconnected ) :
self . connection . close ( )
except smtplib . SMTPException :
if self . fail_silently :
return
raise
finally :
self . connection = None
def send_messages ( self , email_messages ) :
if not email_messages :
return
with self . _lock :
new_conn_created = self . open ( )
if not self . connection :
return
num_sent = 0
for message in email_messages :
sent = self . _send ( message )
if sent :
num_sent += 1
if new_conn_created :
self . close ( )
return num_sent
def _send ( self , email_message ) :
if not email_message . recipients ( ) :
return False
from_email = sanitize_address ( email_message . from_email , email_message . encoding )
recipients = [ sanitize_address ( addr , email_message . encoding ) for addr in email_message . recipients ( ) ]
message = email_message . message ( )
try :
self . connection . sendmail ( from_email , recipients , message . as_bytes ( ) )
except smtplib . SMTPException :
if not self . fail_silently :
raise
return False
return True
from __future__ import unicode_literals
import mimetypes
import os
import random
import sys
import time
from email import ( charset as Charset , encoders as Encoders , message_from_string , generator )
from email . message import Message
from email . mime . text import MIMEText
from email . mime . multipart import MIMEMultipart
from email . mime . base import MIMEBase
from email . mime . message import MIMEMessage
from email . header import Header
from email . utils import formatdate , getaddresses , formataddr , parseaddr
from django . conf import settings
from django . core . mail . utils import DNS_NAME
from django . utils . encoding import force_text
from django . utils import six
utf8_charset = Charset . Charset ( 'utf-8' )
utf8_charset . body_encoding = None
DEFAULT_ATTACHMENT_MIME_TYPE = 'application/octet-stream'
class BadHeaderError ( ValueError ) :
pass
def make_msgid ( idstring = None ) :
timeval = time . time ( )
utcdate = time . strftime ( '%Y%m%d%H%M%S' , time . gmtime ( timeval ) )
try :
pid = os . getpid ( )
except AttributeError :
pid = 1
randint = random . randrange ( 100000 )
if idstring is None :
idstring = ''
else :
idstring = '.' + idstring
idhost = DNS_NAME
msgid = '<%s.%s.%s%s@%s>' % ( utcdate , pid , randint , idstring , idhost )
return msgid
ADDRESS_HEADERS = set ( [ 'from' , 'sender' , 'reply-to' , 'to' , 'cc' , 'bcc' , 'resent-from' , 'resent-sender' , 'resent-to' , 'resent-cc' , 'resent-bcc' , ] )
def forbid_multi_line_headers ( name , val , encoding ) :
encoding = encoding or settings . DEFAULT_CHARSET
val = force_text ( val )
if '\n' in val or '\r' in val :
raise BadHeaderError ( "Header values can't contain newlines (got %r for header %r)" % ( val , name ) )
try :
val . encode ( 'ascii' )
except UnicodeEncodeError :
if name . lower ( ) in ADDRESS_HEADERS :
val = ', ' . join ( sanitize_address ( addr , encoding ) for addr in getaddresses ( ( val , ) ) )
else :
val = Header ( val , encoding ) . encode ( )
else :
if name . lower ( ) == 'subject' :
val = Header ( val ) . encode ( )
return str ( name ) , val
def sanitize_address ( addr , encoding ) :
if isinstance ( addr , six . string_types ) :
addr = parseaddr ( force_text ( addr ) )
nm , addr = addr
try :
nm = Header ( nm , encoding ) . encode ( )
except UnicodeEncodeError :
nm = Header ( nm , 'utf-8' ) . encode ( )
try :
addr . encode ( 'ascii' )
except UnicodeEncodeError :
if '@' in addr :
localpart , domain = addr . split ( '@' , 1 )
localpart = str ( Header ( localpart , encoding ) )
domain = domain . encode ( 'idna' ) . decode ( 'ascii' )
addr = '@' . join ( [ localpart , domain ] )
else :
addr = Header ( addr , encoding ) . encode ( )
return formataddr ( ( nm , addr ) )
class MIMEMixin ( ) :
def as_string ( self , unixfrom = False ) :
fp = six . StringIO ( )
g = generator . Generator ( fp , mangle_from_ = False )
g . flatten ( self , unixfrom = unixfrom )
return fp . getvalue ( )
if six . PY2 :
as_bytes = as_string
else :
def as_bytes ( self , unixfrom = False ) :
fp = six . BytesIO ( )
g = generator . BytesGenerator ( fp , mangle_from_ = False )
g . flatten ( self , unixfrom = unixfrom )
return fp . getvalue ( )
class SafeMIMEMessage ( MIMEMixin , MIMEMessage ) :
def __setitem__ ( self , name , val ) :
name , val = forbid_multi_line_headers ( name , val , 'ascii' )
MIMEMessage . __setitem__ ( self , name , val )
class SafeMIMEText ( MIMEMixin , MIMEText ) :
def __init__ ( self , text , subtype , charset ) :
self . encoding = charset
if charset == 'utf-8' :
MIMEText . __init__ ( self , text , subtype , None )
del self [ 'Content-Transfer-Encoding' ]
if ( 3 , 2 ) < sys . version_info < ( 3 , 3 , 4 ) :
payload = text . encode ( utf8_charset . output_charset )
self . _payload = payload . decode ( 'ascii' , 'surrogateescape' )
self . set_charset ( utf8_charset )
else :
self . set_payload ( text , utf8_charset )
self . replace_header ( 'Content-Type' , 'text/%s; charset="%s"' % ( subtype , charset ) )
else :
MIMEText . __init__ ( self , text , subtype , charset )
def __setitem__ ( self , name , val ) :
name , val = forbid_multi_line_headers ( name , val , self . encoding )
MIMEText . __setitem__ ( self , name , val )
class SafeMIMEMultipart ( MIMEMixin , MIMEMultipart ) :
def __init__ ( self , _subtype = 'mixed' , boundary = None , _subparts = None , encoding = None , ** _params ) :
self . encoding = encoding
MIMEMultipart . __init__ ( self , _subtype , boundary , _subparts , ** _params )
def __setitem__ ( self , name , val ) :
name , val = forbid_multi_line_headers ( name , val , self . encoding )
MIMEMultipart . __setitem__ ( self , name , val )
class EmailMessage ( object ) :
content_subtype = 'plain'
mixed_subtype = 'mixed'
encoding = None
def __init__ ( self , subject = '' , body = '' , from_email = None , to = None , bcc = None , connection = None , attachments = None , headers = None , cc = None ) :
if to :
assert not isinstance ( to , six . string_types ) , '"to" argument must be a list or tuple'
self . to = list ( to )
else :
self . to = [ ]
if cc :
assert not isinstance ( cc , six . string_types ) , '"cc" argument must be a list or tuple'
self . cc = list ( cc )
else :
self . cc = [ ]
if bcc :
assert not isinstance ( bcc , six . string_types ) , '"bcc" argument must be a list or tuple'
self . bcc = list ( bcc )
else :
self . bcc = [ ]
self . from_email = from_email or settings . DEFAULT_FROM_EMAIL
self . subject = subject
self . body = body
self . attachments = attachments or [ ]
self . extra_headers = headers or { }
self . connection = connection
def get_connection ( self , fail_silently = False ) :
from django . core . mail import get_connection
if not self . connection :
self . connection = get_connection ( fail_silently = fail_silently )
return self . connection
def message ( self ) :
encoding = self . encoding or settings . DEFAULT_CHARSET
msg = SafeMIMEText ( self . body , self . content_subtype , encoding )
msg = self . _create_message ( msg )
msg [ 'Subject' ] = self . subject
msg [ 'From' ] = self . extra_headers . get ( 'From' , self . from_email )
msg [ 'To' ] = self . extra_headers . get ( 'To' , ', ' . join ( self . to ) )
if self . cc :
msg [ 'Cc' ] = ', ' . join ( self . cc )
header_names = [ key . lower ( ) for key in self . extra_headers ]
if 'date' not in header_names :
msg [ 'Date' ] = formatdate ( )
if 'message-id' not in header_names :
msg [ 'Message-ID' ] = make_msgid ( )
for name , value in self . extra_headers . items ( ) :
if name . lower ( ) in ( 'from' , 'to' ) :
continue
msg [ name ] = value
return msg
def recipients ( self ) :
return self . to + self . cc + self . bcc
def send ( self , fail_silently = False ) :
if not self . recipients ( ) :
return 0
return self . get_connection ( fail_silently ) . send_messages ( [ self ] )
def attach ( self , filename = None , content = None , mimetype = None ) :
if isinstance ( filename , MIMEBase ) :
assert content is None
assert mimetype is None
self . attachments . append ( filename )
else :
assert content is not None
self . attachments . append ( ( filename , content , mimetype ) )
def attach_file ( self , path , mimetype = None ) :
filename = os . path . basename ( path )
with open ( path , 'rb' ) as f :
content = f . read ( )
self . attach ( filename , content , mimetype )
def _create_message ( self , msg ) :
return self . _create_attachments ( msg )
def _create_attachments ( self , msg ) :
if self . attachments :
encoding = self . encoding or settings . DEFAULT_CHARSET
body_msg = msg
msg = SafeMIMEMultipart ( _subtype = self . mixed_subtype , encoding = encoding )
if self . body :
msg . attach ( body_msg )
for attachment in self . attachments :
if isinstance ( attachment , MIMEBase ) :
msg . attach ( attachment )
else :
msg . attach ( self . _create_attachment ( * attachment ) )
return msg
def _create_mime_attachment ( self , content , mimetype ) :
basetype , subtype = mimetype . split ( '/' , 1 )
if basetype == 'text' :
encoding = self . encoding or settings . DEFAULT_CHARSET
attachment = SafeMIMEText ( content , subtype , encoding )
elif basetype == 'message' and subtype == 'rfc822' :
if isinstance ( content , EmailMessage ) :
content = content . message ( )
elif not isinstance ( content , Message ) :
content = message_from_string ( content )
attachment = SafeMIMEMessage ( content , subtype )
else :
attachment = MIMEBase ( basetype , subtype )
attachment . set_payload ( content )
Encoders . encode_base64 ( attachment )
return attachment
def _create_attachment ( self , filename , content , mimetype = None ) :
if mimetype is None :
mimetype , _ = mimetypes . guess_type ( filename )
if mimetype is None :
mimetype = DEFAULT_ATTACHMENT_MIME_TYPE
attachment = self . _create_mime_attachment ( content , mimetype )
if filename :
try :
filename . encode ( 'ascii' )
except UnicodeEncodeError :
if six . PY2 :
filename = filename . encode ( 'utf-8' )
filename = ( 'utf-8' , '' , filename )
attachment . add_header ( 'Content-Disposition' , 'attachment' , filename = filename )
return attachment
class EmailMultiAlternatives ( EmailMessage ) :
alternative_subtype = 'alternative'
def __init__ ( self , subject = '' , body = '' , from_email = None , to = None , bcc = None , connection = None , attachments = None , headers = None , alternatives = None , cc = None ) :
super ( EmailMultiAlternatives , self ) . __init__ ( subject , body , from_email , to , bcc , connection , attachments , headers , cc )
self . alternatives = alternatives or [ ]
def attach_alternative ( self , content , mimetype ) :
assert content is not None
assert mimetype is not None
self . alternatives . append ( ( content , mimetype ) )
def _create_message ( self , msg ) :
return self . _create_attachments ( self . _create_alternatives ( msg ) )
def _create_alternatives ( self , msg ) :
encoding = self . encoding or settings . DEFAULT_CHARSET
if self . alternatives :
body_msg = msg
msg = SafeMIMEMultipart ( _subtype = self . alternative_subtype , encoding = encoding )
if self . body :
msg . attach ( body_msg )
for alternative in self . alternatives :
msg . attach ( self . _create_mime_attachment ( * alternative ) )
return msg
import socket
class CachedDnsName ( object ) :
def __str__ ( self ) :
return self . get_fqdn ( )
def get_fqdn ( self ) :
if not hasattr ( self , '_fqdn' ) :
self . _fqdn = socket . getfqdn ( )
return self . _fqdn
DNS_NAME = CachedDnsName ( )
from __future__ import unicode_literals
import collections
from importlib import import_module
import os
import sys
import django
from django . apps import apps
from django . conf import settings
from django . core . exceptions import ImproperlyConfigured
from django . core . management . base import ( BaseCommand , CommandError , CommandParser , handle_default_options )
from django . core . management . color import color_style
from django . utils import lru_cache
from django . utils import six
def find_commands ( management_dir ) :
command_dir = os . path . join ( management_dir , 'commands' )
try :
return [ f [ : - 3 ] for f in os . listdir ( command_dir ) if not f . startswith ( '_' ) and f . endswith ( '.py' ) ]
except OSError :
return [ ]
def load_command_class ( app_name , name ) :
module = import_module ( '%s.management.commands.%s' % ( app_name , name ) )
return module . Command ( )
@ lru_cache . lru_cache ( maxsize = None )
def get_commands ( ) :
commands = { name : 'django.core' for name in find_commands ( __path__ [ 0 ] ) }
if not settings . configured :
return commands
for app_config in reversed ( list ( apps . get_app_configs ( ) ) ) :
path = os . path . join ( app_config . path , 'management' )
commands . update ( { name : app_config . name for name in find_commands ( path ) } )
return commands
def call_command ( name , * args , ** options ) :
try :
app_name = get_commands ( ) [ name ]
except KeyError :
raise CommandError ( "Unknown command: %r" % name )
if isinstance ( app_name , BaseCommand ) :
command = app_name
else :
command = load_command_class ( app_name , name )
parser = command . create_parser ( '' , name )
if command . use_argparse :
defaults = parser . parse_args ( args = args )
defaults = dict ( defaults . _get_kwargs ( ) , ** options )
else :
defaults , _ = parser . parse_args ( args = [ ] )
defaults = dict ( defaults . __dict__ , ** options )
return command . execute ( * args , ** defaults )
class ManagementUtility ( object ) :
def __init__ ( self , argv = None ) :
self . argv = argv or sys . argv [ : ]
self . prog_name = os . path . basename ( self . argv [ 0 ] )
self . settings_exception = None
def main_help_text ( self , commands_only = False ) :
if commands_only :
usage = sorted ( get_commands ( ) . keys ( ) )
else :
usage = [ "" , "Type '%s help ' for help on a specific subcommand." % self . prog_name , "" , "Available subcommands:" , ]
commands_dict = collections . defaultdict ( lambda : [ ] )
for name , app in six . iteritems ( get_commands ( ) ) :
if app == 'django.core' :
app = 'django'
else :
app = app . rpartition ( '.' ) [ - 1 ]
commands_dict [ app ] . append ( name )
style = color_style ( )
for app in sorted ( commands_dict . keys ( ) ) :
usage . append ( "" )
usage . append ( style . NOTICE ( "[%s]" % app ) )
for name in sorted ( commands_dict [ app ] ) :
usage . append ( " %s" % name )
if self . settings_exception is not None :
usage . append ( style . NOTICE ( "Note that only Django core commands are listed " "as settings are not properly configured (error: %s)." % self . settings_exception ) )
return '\n' . join ( usage )
def fetch_command ( self , subcommand ) :
commands = get_commands ( )
try :
app_name = commands [ subcommand ]
except KeyError :
settings . INSTALLED_APPS
sys . stderr . write ( "Unknown command: %r\nType '%s help' for usage.\n" % ( subcommand , self . prog_name ) )
sys . exit ( 1 )
if isinstance ( app_name , BaseCommand ) :
klass = app_name
else :
klass = load_command_class ( app_name , subcommand )
return klass
def autocomplete ( self ) :
if 'DJANGO_AUTO_COMPLETE' not in os . environ :
return
cwords = os . environ [ 'COMP_WORDS' ] . split ( ) [ 1 : ]
cword = int ( os . environ [ 'COMP_CWORD' ] )
try :
curr = cwords [ cword - 1 ]
except IndexError :
curr = ''
subcommands = list ( get_commands ( ) ) + [ 'help' ]
options = [ ( '--help' , None ) ]
if cword == 1 :
print ( ' ' . join ( sorted ( filter ( lambda x : x . startswith ( curr ) , subcommands ) ) ) )
elif cwords [ 0 ] in subcommands and cwords [ 0 ] != 'help' :
subcommand_cls = self . fetch_command ( cwords [ 0 ] )
if cwords [ 0 ] == 'runfcgi' :
from django . core . servers . fastcgi import FASTCGI_OPTIONS
options += [ ( k , 1 ) for k in FASTCGI_OPTIONS ]
elif cwords [ 0 ] in ( 'dumpdata' , 'sql' , 'sqlall' , 'sqlclear' , 'sqlcustom' , 'sqlindexes' , 'sqlsequencereset' , 'test' ) :
try :
app_configs = apps . get_app_configs ( )
options += [ ( app_config . label , 0 ) for app_config in app_configs ]
except ImportError :
pass
parser = subcommand_cls . create_parser ( '' , cwords [ 0 ] )
if subcommand_cls . use_argparse :
options += [ ( sorted ( s_opt . option_strings ) [ 0 ] , s_opt . nargs != 0 ) for s_opt in parser . _actions if s_opt . option_strings ]
else :
options += [ ( s_opt . get_opt_string ( ) , s_opt . nargs ) for s_opt in parser . option_list ]
prev_opts = [ x . split ( '=' ) [ 0 ] for x in cwords [ 1 : cword - 1 ] ]
options = [ opt for opt in options if opt [ 0 ] not in prev_opts ]
options = sorted ( ( k , v ) for k , v in options if k . startswith ( curr ) )
for option in options :
opt_label = option [ 0 ]
if option [ 1 ] :
opt_label += '='
print ( opt_label )
sys . exit ( 1 )
def execute ( self ) :
try :
subcommand = self . argv [ 1 ]
except IndexError :
subcommand = 'help'
parser = CommandParser ( None , usage = "%(prog)s subcommand [options] [args]" , add_help = False )
parser . add_argument ( '--settings' )
parser . add_argument ( '--pythonpath' )
parser . add_argument ( 'args' , nargs = '*' )
try :
options , args = parser . parse_known_args ( self . argv [ 2 : ] )
handle_default_options ( options )
except CommandError :
pass
no_settings_commands = [ 'help' , 'version' , '--help' , '--version' , '-h' , 'compilemessages' , 'makemessages' , 'startapp' , 'startproject' , ]
try :
settings . INSTALLED_APPS
except ImproperlyConfigured as exc :
self . settings_exception = exc
if subcommand in no_settings_commands :
settings . configure ( )
if settings . configured :
django . setup ( )
self . autocomplete ( )
if subcommand == 'help' :
if '--commands' in args :
sys . stdout . write ( self . main_help_text ( commands_only = True ) + '\n' )
elif len ( options . args ) < 1 :
sys . stdout . write ( self . main_help_text ( ) + '\n' )
else :
self . fetch_command ( options . args [ 0 ] ) . print_help ( self . prog_name , options . args [ 0 ] )
elif subcommand == 'version' or self . argv [ 1 : ] == [ '--version' ] :
sys . stdout . write ( django . get_version ( ) + '\n' )
elif self . argv [ 1 : ] in ( [ '--help' ] , [ '-h' ] ) :
sys . stdout . write ( self . main_help_text ( ) + '\n' )
else :
self . fetch_command ( subcommand ) . run_from_argv ( self . argv )
def execute_from_command_line ( argv = None ) :
utility = ManagementUtility ( argv )
utility . execute ( )
from __future__ import unicode_literals
import os
import sys
import warnings
from argparse import ArgumentParser
from optparse import OptionParser
import django
from django . core import checks
from django . core . exceptions import ImproperlyConfigured
from django . core . management . color import color_style , no_style
from django . utils . deprecation import RemovedInDjango19Warning , RemovedInDjango20Warning
from django . utils . encoding import force_str
class CommandError ( Exception ) :
pass
class CommandParser ( ArgumentParser ) :
def __init__ ( self , cmd , ** kwargs ) :
self . cmd = cmd
super ( CommandParser , self ) . __init__ ( ** kwargs )
def parse_args ( self , args = None , namespace = None ) :
if ( hasattr ( self . cmd , 'missing_args_message' ) and not ( args or any ( [ not arg . startswith ( '-' ) for arg in args ] ) ) ) :
self . error ( self . cmd . missing_args_message )
return super ( CommandParser , self ) . parse_args ( args , namespace )
def error ( self , message ) :
if self . cmd . _called_from_command_line :
super ( CommandParser , self ) . error ( message )
else :
raise CommandError ( "Error: %s" % message )
def handle_default_options ( options ) :
if options . settings :
os . environ [ 'DJANGO_SETTINGS_MODULE' ] = options . settings
if options . pythonpath :
sys . path . insert ( 0 , options . pythonpath )
class OutputWrapper ( object ) :
def __init__ ( self , out , style_func = None , ending = '\n' ) :
self . _out = out
self . style_func = None
if hasattr ( out , 'isatty' ) and out . isatty ( ) :
self . style_func = style_func
self . ending = ending
def __getattr__ ( self , name ) :
return getattr ( self . _out , name )
def write ( self , msg , style_func = None , ending = None ) :
ending = self . ending if ending is None else ending
if ending and not msg . endswith ( ending ) :
msg += ending
style_func = [ f for f in ( style_func , self . style_func , lambda x : x ) if f is not None ] [ 0 ]
self . _out . write ( force_str ( style_func ( msg ) ) )
class BaseCommand ( object ) :
option_list = ( )
help = ''
args = ''
_called_from_command_line = False
can_import_settings = True
output_transaction = False
leave_locale_alone = False
def __init__ ( self ) :
self . style = color_style ( )
has_old_option = hasattr ( self , 'requires_model_validation' )
has_new_option = hasattr ( self , 'requires_system_checks' )
if has_old_option :
warnings . warn ( '"requires_model_validation" is deprecated ' 'in favor of "requires_system_checks".' , RemovedInDjango19Warning )
if has_old_option and has_new_option :
raise ImproperlyConfigured ( 'Command %s defines both "requires_model_validation" ' 'and "requires_system_checks", which is illegal. Use only ' '"requires_system_checks".' % self . __class__ . __name__ )
self . requires_system_checks = ( self . requires_system_checks if has_new_option else self . requires_model_validation if has_old_option else True )
@ property
def use_argparse ( self ) :
return not bool ( self . option_list )
def get_version ( self ) :
return django . get_version ( )
def usage ( self , subcommand ) :
usage = '%%prog %s [options] %s' % ( subcommand , self . args )
if self . help :
return '%s\n\n%s' % ( usage , self . help )
else :
return usage
def create_parser ( self , prog_name , subcommand ) :
if not self . use_argparse :
warnings . warn ( "OptionParser usage for Django management commands " "is deprecated, use ArgumentParser instead" , RemovedInDjango20Warning )
parser = OptionParser ( prog = prog_name , usage = self . usage ( subcommand ) , version = self . get_version ( ) )
parser . add_option ( '-v' , '--verbosity' , action = 'store' , dest = 'verbosity' , default = '1' , type = 'choice' , choices = [ '0' , '1' , '2' , '3' ] , help = 'Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output' )
parser . add_option ( '--settings' , help = 'The Python path to a settings module, e.g. "myproject.settings.main". If this isn\'t provided, the DJANGO_SETTINGS_MODULE environment variable will be used.' )
parser . add_option ( '--pythonpath' , help = 'A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".' ) ,
parser . add_option ( '--traceback' , action = 'store_true' , help = 'Raise on exception' )
parser . add_option ( '--no-color' , action = 'store_true' , dest = 'no_color' , default = False , help = "Don't colorize the command output." )
for opt in self . option_list :
parser . add_option ( opt )
else :
parser = CommandParser ( self , prog = "%s %s" % ( os . path . basename ( prog_name ) , subcommand ) , description = self . help or None )
parser . add_argument ( '--version' , action = 'version' , version = self . get_version ( ) )
parser . add_argument ( '-v' , '--verbosity' , action = 'store' , dest = 'verbosity' , default = '1' , type = int , choices = [ 0 , 1 , 2 , 3 ] , help = 'Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output' )
parser . add_argument ( '--settings' , help = 'The Python path to a settings module, e.g. "myproject.settings.main". If this isn\'t provided, the DJANGO_SETTINGS_MODULE environment variable will be used.' )
parser . add_argument ( '--pythonpath' , help = 'A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".' )
parser . add_argument ( '--traceback' , action = 'store_true' , help = 'Raise on exception' )
parser . add_argument ( '--no-color' , action = 'store_true' , dest = 'no_color' , default = False , help = "Don't colorize the command output." )
if self . args :
parser . add_argument ( 'args' , nargs = '*' )
self . add_arguments ( parser )
return parser
def add_arguments ( self , parser ) :
pass
def print_help ( self , prog_name , subcommand ) :
parser = self . create_parser ( prog_name , subcommand )
parser . print_help ( )
def run_from_argv ( self , argv ) :
self . _called_from_command_line = True
parser = self . create_parser ( argv [ 0 ] , argv [ 1 ] )
if self . use_argparse :
options = parser . parse_args ( argv [ 2 : ] )
cmd_options = vars ( options )
if 'args' in options :
args = options . args
del cmd_options [ 'args' ]
else :
args = ( )
else :
options , args = parser . parse_args ( argv [ 2 : ] )
cmd_options = vars ( options )
handle_default_options ( options )
try :
self . execute ( * args , ** cmd_options )
except Exception as e :
if options . traceback or not isinstance ( e , CommandError ) :
raise
stderr = getattr ( self , 'stderr' , OutputWrapper ( sys . stderr , self . style . ERROR ) )
stderr . write ( '%s: %s' % ( e . __class__ . __name__ , e ) )
sys . exit ( 1 )
def execute ( self , * args , ** options ) :
self . stdout = OutputWrapper ( options . get ( 'stdout' , sys . stdout ) )
if options . get ( 'no_color' ) :
self . style = no_style ( )
self . stderr = OutputWrapper ( options . get ( 'stderr' , sys . stderr ) )
else :
self . stderr = OutputWrapper ( options . get ( 'stderr' , sys . stderr ) , self . style . ERROR )
if self . can_import_settings :
from django . conf import settings
saved_locale = None
if not self . leave_locale_alone :
if not self . can_import_settings :
raise CommandError ( "Incompatible values of 'leave_locale_alone' " "(%s) and 'can_import_settings' (%s) command " "options." % ( self . leave_locale_alone , self . can_import_settings ) )
from django . utils import translation
saved_locale = translation . get_language ( )
translation . activate ( 'en-us' )
try :
if ( self . requires_system_checks and not options . get ( 'skip_validation' ) and not options . get ( 'skip_checks' ) ) :
self . check ( )
output = self . handle ( * args , ** options )
if output :
if self . output_transaction :
from django . db import connections , DEFAULT_DB_ALIAS
connection = connections [ options . get ( 'database' , DEFAULT_DB_ALIAS ) ]
if connection . ops . start_transaction_sql ( ) :
self . stdout . write ( self . style . SQL_KEYWORD ( connection . ops . start_transaction_sql ( ) ) )
self . stdout . write ( output )
if self . output_transaction :
self . stdout . write ( '\n' + self . style . SQL_KEYWORD ( connection . ops . end_transaction_sql ( ) ) )
finally :
if saved_locale is not None :
translation . activate ( saved_locale )
def validate ( self , app_config = None , display_num_errors = False ) :
if app_config is None :
app_configs = None
else :
app_configs = [ app_config ]
return self . check ( app_configs = app_configs , display_num_errors = display_num_errors )
def check ( self , app_configs = None , tags = None , display_num_errors = False ) :
all_issues = checks . run_checks ( app_configs = app_configs , tags = tags )
msg = ""
visible_issue_count = 0
if all_issues :
debugs = [ e for e in all_issues if e . level < checks . INFO and not e . is_silenced ( ) ]
infos = [ e for e in all_issues if checks . INFO <= e . level < checks . WARNING and not e . is_silenced ( ) ]
warnings = [ e for e in all_issues if checks . WARNING <= e . level < checks . ERROR and not e . is_silenced ( ) ]
errors = [ e for e in all_issues if checks . ERROR <= e . level < checks . CRITICAL ]
criticals = [ e for e in all_issues if checks . CRITICAL <= e . level ]
sorted_issues = [ ( criticals , 'CRITICALS' ) , ( errors , 'ERRORS' ) , ( warnings , 'WARNINGS' ) , ( infos , 'INFOS' ) , ( debugs , 'DEBUGS' ) , ]
for issues , group_name in sorted_issues :
if issues :
visible_issue_count += len ( issues )
formatted = ( color_style ( ) . ERROR ( force_str ( e ) ) if e . is_serious ( ) else color_style ( ) . WARNING ( force_str ( e ) ) for e in issues )
formatted = "\n" . join ( sorted ( formatted ) )
msg += '\n%s:\n%s\n' % ( group_name , formatted )
if msg :
msg = "System check identified some issues:\n%s" % msg
if display_num_errors :
if msg :
msg += '\n'
msg += "System check identified %s (%s silenced)." % ( "no issues" if visible_issue_count == 0 else "1 issue" if visible_issue_count == 1 else "%s issues" % visible_issue_count , len ( all_issues ) - visible_issue_count , )
if any ( e . is_serious ( ) and not e . is_silenced ( ) for e in all_issues ) :
raise CommandError ( msg )
elif msg and visible_issue_count :
self . stderr . write ( msg )
elif msg :
self . stdout . write ( msg )
def handle ( self , * args , ** options ) :
raise NotImplementedError ( 'subclasses of BaseCommand must provide a handle() method' )
class AppCommand ( BaseCommand ) :
missing_args_message = "Enter at least one application label."
def add_arguments ( self , parser ) :
parser . add_argument ( 'args' , metavar = 'app_label' , nargs = '+' , help = 'One or more application label.' )
def handle ( self , * app_labels , ** options ) :
from django . apps import apps
try :
app_configs = [ apps . get_app_config ( app_label ) for app_label in app_labels ]
except ( LookupError , ImportError ) as e :
raise CommandError ( "%s. Are you sure your INSTALLED_APPS setting is correct?" % e )
output = [ ]
for app_config in app_configs :
app_output = self . handle_app_config ( app_config , ** options )
if app_output :
output . append ( app_output )
return '\n' . join ( output )
def handle_app_config ( self , app_config , ** options ) :
try :
handle_app = self . handle_app
except AttributeError :
raise NotImplementedError ( "Subclasses of AppCommand must provide" "a handle_app_config() method." )
else :
warnings . warn ( "AppCommand.handle_app() is superseded by " "AppCommand.handle_app_config()." , RemovedInDjango19Warning , stacklevel = 2 )
if app_config . models_module is None :
raise CommandError ( "AppCommand cannot handle app '%s' in legacy mode " "because it doesn't have a models module." % app_config . label )
return handle_app ( app_config . models_module , ** options )
class LabelCommand ( BaseCommand ) :
label = 'label'
missing_args_message = "Enter at least one %s." % label
def add_arguments ( self , parser ) :
parser . add_argument ( 'args' , metavar = self . label , nargs = '+' )
def handle ( self , * labels , ** options ) :
output = [ ]
for label in labels :
label_output = self . handle_label ( label , ** options )
if label_output :
output . append ( label_output )
return '\n' . join ( output )
def handle_label ( self , label , ** options ) :
raise NotImplementedError ( 'subclasses of LabelCommand must provide a handle_label() method' )
class NoArgsCommand ( BaseCommand ) :
args = ''
def __init__ ( self ) :
warnings . warn ( "NoArgsCommand class is deprecated and will be removed in Django 2.0. " "Use BaseCommand instead, which takes no arguments by default." , RemovedInDjango20Warning )
super ( NoArgsCommand , self ) . __init__ ( )
def handle ( self , * args , ** options ) :
if args :
raise CommandError ( "Command doesn't accept any arguments" )
return self . handle_noargs ( ** options )
def handle_noargs ( self , ** options ) :
raise NotImplementedError ( 'subclasses of NoArgsCommand must provide a handle_noargs() method' )
import os
import sys
from django . utils import termcolors
def supports_color ( ) :
plat = sys . platform
supported_platform = plat != 'Pocket PC' and ( plat != 'win32' or 'ANSICON' in os . environ )
is_a_tty = hasattr ( sys . stdout , 'isatty' ) and sys . stdout . isatty ( )
if not supported_platform or not is_a_tty :
return False
return True
def color_style ( ) :
if not supports_color ( ) :
style = no_style ( )
else :
DJANGO_COLORS = os . environ . get ( 'DJANGO_COLORS' , '' )
color_settings = termcolors . parse_color_setting ( DJANGO_COLORS )
if color_settings :
class dummy :
pass
style = dummy ( )
for role in termcolors . PALETTES [ termcolors . NOCOLOR_PALETTE ] :
format = color_settings . get ( role , { } )
setattr ( style , role , termcolors . make_style ( ** format ) )
style . ERROR_OUTPUT = style . ERROR
else :
style = no_style ( )
return style
def no_style ( ) :
class dummy :
def __getattr__ ( self , attr ) :
return lambda x : x
return dummy ( )
from __future__ import unicode_literals
from django . apps import apps
from django . core import checks
from django . core . checks . registry import registry
from django . core . management . base import BaseCommand , CommandError
class Command ( BaseCommand ) :
help = "Checks the entire Django project for potential problems."
requires_system_checks = False
def add_arguments ( self , parser ) :
parser . add_argument ( 'args' , metavar = 'app_label' , nargs = '*' )
parser . add_argument ( '--tag' , '-t' , action = 'append' , dest = 'tags' , help = 'Run only checks labeled with given tag.' )
parser . add_argument ( '--list-tags' , action = 'store_true' , dest = 'list_tags' , help = 'List available tags.' )
def handle ( self , * app_labels , ** options ) :
if options . get ( 'list_tags' ) :
self . stdout . write ( '\n' . join ( sorted ( registry . tags_available ( ) ) ) )
return
if app_labels :
app_configs = [ apps . get_app_config ( app_label ) for app_label in app_labels ]
else :
app_configs = None
tags = options . get ( 'tags' , None )
if tags and any ( not checks . tag_exists ( tag ) for tag in tags ) :
invalid_tag = next ( tag for tag in tags if not checks . tag_exists ( tag ) )
raise CommandError ( 'There is no system check with the "%s" tag.' % invalid_tag )
self . check ( app_configs = app_configs , tags = tags , display_num_errors = True )
from __future__ import unicode_literals
import codecs
import glob
import os
from django . core . management . base import BaseCommand , CommandError
from django . core . management . utils import find_command , popen_wrapper
from django . utils . _os import npath , upath
def has_bom ( fn ) :
with open ( fn , 'rb' ) as f :
sample = f . read ( 4 )
return sample [ : 3 ] == b'\xef\xbb\xbf' or sample . startswith ( codecs . BOM_UTF16_LE ) or sample . startswith ( codecs . BOM_UTF16_BE )
def is_writable ( path ) :
try :
with open ( path , 'a' ) :
os . utime ( path , None )
except ( IOError , OSError ) :
return False
return True
class Command ( BaseCommand ) :
help = 'Compiles .po files to .mo files for use with builtin gettext support.'
requires_system_checks = False
leave_locale_alone = True
program = 'msgfmt'
program_options = [ '--check-format' ]
def add_arguments ( self , parser ) :
parser . add_argument ( '--locale' , '-l' , dest = 'locale' , action = 'append' , default = [ ] , help = 'Locale(s) to process (e.g. de_AT). Default is to process all. ' 'Can be used multiple times.' )
parser . add_argument ( '--exclude' , '-x' , dest = 'exclude' , action = 'append' , default = [ ] , help = 'Locales to exclude. Default is none. Can be used multiple times.' )
def handle ( self , ** options ) :
locale = options . get ( 'locale' )
exclude = options . get ( 'exclude' )
self . verbosity = int ( options . get ( 'verbosity' ) )
if find_command ( self . program ) is None :
raise CommandError ( "Can't find %s. Make sure you have GNU gettext " "tools 0.15 or newer installed." % self . program )
basedirs = [ os . path . join ( 'conf' , 'locale' ) , 'locale' ]
if os . environ . get ( 'DJANGO_SETTINGS_MODULE' ) :
from django . conf import settings
basedirs . extend ( [ upath ( path ) for path in settings . LOCALE_PATHS ] )
basedirs = set ( map ( os . path . abspath , filter ( os . path . isdir , basedirs ) ) )
if not basedirs :
raise CommandError ( "This script should be run from the Django Git " "checkout or your project or app tree, or with " "the settings module specified." )
all_locales = [ ]
for basedir in basedirs :
locale_dirs = filter ( os . path . isdir , glob . glob ( '%s/*' % basedir ) )
all_locales . extend ( map ( os . path . basename , locale_dirs ) )
locales = locale or all_locales
locales = set ( locales ) - set ( exclude )
for basedir in basedirs :
if locales :
dirs = [ os . path . join ( basedir , l , 'LC_MESSAGES' ) for l in locales ]
else :
dirs = [ basedir ]
locations = [ ]
for ldir in dirs :
for dirpath , dirnames , filenames in os . walk ( ldir ) :
locations . extend ( ( dirpath , f ) for f in filenames if f . endswith ( '.po' ) )
if locations :
self . compile_messages ( locations )
def compile_messages ( self , locations ) :
for i , ( dirpath , f ) in enumerate ( locations ) :
if self . verbosity > 0 :
self . stdout . write ( 'processing file %s in %s\n' % ( f , dirpath ) )
po_path = os . path . join ( dirpath , f )
if has_bom ( po_path ) :
raise CommandError ( "The %s file has a BOM (Byte Order Mark). " "Django only supports .po files encoded in " "UTF-8 and without any BOM." % po_path )
base_path = os . path . splitext ( po_path ) [ 0 ]
if i == 0 and not is_writable ( npath ( base_path + '.mo' ) ) :
self . stderr . write ( "The po files under %s are in a seemingly not writable location. " "mo files will not be updated/created." % dirpath )
return
args = [ self . program ] + self . program_options + [ '-o' , npath ( base_path + '.mo' ) , npath ( base_path + '.po' ) ]
output , errors , status = popen_wrapper ( args )
if status :
if errors :
msg = "Execution of %s failed: %s" % ( self . program , errors )
else :
msg = "Execution of %s failed" % self . program
raise CommandError ( msg )
from django . conf import settings
from django . core . cache import caches
from django . core . cache . backends . db import BaseDatabaseCache
from django . core . management . base import BaseCommand , CommandError
from django . db import connections , router , transaction , models , DEFAULT_DB_ALIAS
from django . db . utils import DatabaseError
from django . utils . encoding import force_text
class Command ( BaseCommand ) :
help = "Creates the tables needed to use the SQL cache backend."
requires_system_checks = False
def add_arguments ( self , parser ) :
parser . add_argument ( 'args' , metavar = 'table_name' , nargs = '*' , help = 'Optional table names. Otherwise, settings.CACHES is used to ' 'find cache tables.' )
parser . add_argument ( '--database' , action = 'store' , dest = 'database' , default = DEFAULT_DB_ALIAS , help = 'Nominates a database onto which the cache tables will be ' 'installed. Defaults to the "default" database.' )
def handle ( self , * tablenames , ** options ) :
db = options . get ( 'database' )
self . verbosity = int ( options . get ( 'verbosity' ) )
if len ( tablenames ) :
for tablename in tablenames :
self . create_table ( db , tablename )
else :
for cache_alias in settings . CACHES :
cache = caches [ cache_alias ]
if isinstance ( cache , BaseDatabaseCache ) :
self . create_table ( db , cache . _table )
def create_table ( self , database , tablename ) :
cache = BaseDatabaseCache ( tablename , { } )
if not router . allow_migrate ( database , cache . cache_model_class ) :
return
connection = connections [ database ]
if tablename in connection . introspection . table_names ( ) :
if self . verbosity > 0 :
self . stdout . write ( "Cache table '%s' already exists." % tablename )
return
fields = ( models . CharField ( name = 'cache_key' , max_length = 255 , unique = True , primary_key = True ) , models . TextField ( name = 'value' ) , models . DateTimeField ( name = 'expires' , db_index = True ) , )
table_output = [ ]
index_output = [ ]
qn = connection . ops . quote_name
for f in fields :
field_output = [ qn ( f . name ) , f . db_type ( connection = connection ) ]
field_output . append ( "%sNULL" % ( "NOT " if not f . null else "" ) )
if f . primary_key :
field_output . append ( "PRIMARY KEY" )
elif f . unique :
field_output . append ( "UNIQUE" )
if f . db_index :
unique = "UNIQUE " if f . unique else ""
index_output . append ( "CREATE %sINDEX %s ON %s (%s);" % ( unique , qn ( '%s_%s' % ( tablename , f . name ) ) , qn ( tablename ) , qn ( f . name ) ) )
table_output . append ( " " . join ( field_output ) )
full_statement = [ "CREATE TABLE %s (" % qn ( tablename ) ]
for i , line in enumerate ( table_output ) :
full_statement . append ( ' %s%s' % ( line , ',' if i < len ( table_output ) - 1 else '' ) )
full_statement . append ( ');' )
with transaction . atomic ( using = database , savepoint = connection . features . can_rollback_ddl ) :
with connection . cursor ( ) as curs :
try :
curs . execute ( "\n" . join ( full_statement ) )
except DatabaseError as e :
raise CommandError ( "Cache table '%s' could not be created.\nThe error was: %s." % ( tablename , force_text ( e ) ) )
for statement in index_output :
curs . execute ( statement )
if self . verbosity > 1 :
self . stdout . write ( "Cache table '%s' created." % tablename )
from django . core . management . base import BaseCommand , CommandError
from django . db import connections , DEFAULT_DB_ALIAS
class Command ( BaseCommand ) :
help = ( "Runs the command-line client for specified database, or the " "default database if none is provided." )
requires_system_checks = False
def add_arguments ( self , parser ) :
parser . add_argument ( '--database' , action = 'store' , dest = 'database' , default = DEFAULT_DB_ALIAS , help = 'Nominates a database onto which to ' 'open a shell. Defaults to the "default" database.' )
def handle ( self , ** options ) :
connection = connections [ options . get ( 'database' ) ]
try :
connection . client . runshell ( )
except OSError :
raise CommandError ( 'You appear not to have the %r program installed or on your path.' % connection . client . executable_name )
from django . core . management . base import BaseCommand
def module_to_dict ( module , omittable = lambda k : k . startswith ( '_' ) ) :
return dict ( ( k , repr ( v ) ) for k , v in module . __dict__ . items ( ) if not omittable ( k ) )
class Command ( BaseCommand ) :
requires_system_checks = False
def add_arguments ( self , parser ) :
parser . add_argument ( '--all' , action = 'store_true' , dest = 'all' , default = False , help = 'Display all settings, regardless of their value. ' 'Default values are prefixed by "###".' )
def handle ( self , ** options ) :
from django . conf import settings , global_settings
settings . _setup ( )
user_settings = module_to_dict ( settings . _wrapped )
default_settings = module_to_dict ( global_settings )
output = [ ]
for key in sorted ( user_settings ) :
if key not in default_settings :
output . append ( "%s = %s ###" % ( key , user_settings [ key ] ) )
elif user_settings [ key ] != default_settings [ key ] :
output . append ( "%s = %s" % ( key , user_settings [ key ] ) )
elif options [ 'all' ] :
output . append ( "### %s = %s" % ( key , user_settings [ key ] ) )
return '\n' . join ( output )
import warnings
from collections import OrderedDict
from django . apps import apps
from django . core . management . base import BaseCommand , CommandError
from django . core import serializers
from django . db import router , DEFAULT_DB_ALIAS
from django . utils . deprecation import RemovedInDjango19Warning
class Command ( BaseCommand ) :
help = ( "Output the contents of the database as a fixture of the given " "format (using each model's default manager unless --all is " "specified)." )
def add_arguments ( self , parser ) :
parser . add_argument ( 'args' , metavar = 'app_label[.ModelName]' , nargs = '*' , help = 'Restricts dumped data to the specified app_label or app_label.ModelName.' )
parser . add_argument ( '--format' , default = 'json' , dest = 'format' , help = 'Specifies the output serialization format for fixtures.' )
parser . add_argument ( '--indent' , default = None , dest = 'indent' , type = int , help = 'Specifies the indent level to use when pretty-printing output.' )
parser . add_argument ( '--database' , action = 'store' , dest = 'database' , default = DEFAULT_DB_ALIAS , help = 'Nominates a specific database to dump fixtures from. ' 'Defaults to the "default" database.' )
parser . add_argument ( '-e' , '--exclude' , dest = 'exclude' , action = 'append' , default = [ ] , help = 'An app_label or app_label.ModelName to exclude ' '(use multiple --exclude to exclude multiple apps/models).' )
parser . add_argument ( '-n' , '--natural' , action = 'store_true' , dest = 'use_natural_keys' , default = False , help = 'Use natural keys if they are available (deprecated: use --natural-foreign instead).' )
parser . add_argument ( '--natural-foreign' , action = 'store_true' , dest = 'use_natural_foreign_keys' , default = False , help = 'Use natural foreign keys if they are available.' )
parser . add_argument ( '--natural-primary' , action = 'store_true' , dest = 'use_natural_primary_keys' , default = False , help = 'Use natural primary keys if they are available.' )
parser . add_argument ( '-a' , '--all' , action = 'store_true' , dest = 'use_base_manager' , default = False , help = "Use Django's base manager to dump all models stored in the database, " "including those that would otherwise be filtered or modified by a custom manager." )
parser . add_argument ( '--pks' , dest = 'primary_keys' , help = "Only dump objects with given primary keys. " "Accepts a comma separated list of keys. " "This option will only work when you specify one model." )
parser . add_argument ( '-o' , '--output' , default = None , dest = 'output' , help = 'Specifies file to which the output is written.' )
def handle ( self , * app_labels , ** options ) :
format = options . get ( 'format' )
indent = options . get ( 'indent' )
using = options . get ( 'database' )
excludes = options . get ( 'exclude' )
output = options . get ( 'output' )
show_traceback = options . get ( 'traceback' )
use_natural_keys = options . get ( 'use_natural_keys' )
if use_natural_keys :
warnings . warn ( "``--natural`` is deprecated; use ``--natural-foreign`` instead." , RemovedInDjango19Warning )
use_natural_foreign_keys = options . get ( 'use_natural_foreign_keys' ) or use_natural_keys
use_natural_primary_keys = options . get ( 'use_natural_primary_keys' )
use_base_manager = options . get ( 'use_base_manager' )
pks = options . get ( 'primary_keys' )
if pks :
primary_keys = pks . split ( ',' )
else :
primary_keys = [ ]
excluded_apps = set ( )
excluded_models = set ( )
for exclude in excludes :
if '.' in exclude :
try :
model = apps . get_model ( exclude )
except LookupError :
raise CommandError ( 'Unknown model in excludes: %s' % exclude )
excluded_models . add ( model )
else :
try :
app_config = apps . get_app_config ( exclude )
except LookupError :
raise CommandError ( 'Unknown app in excludes: %s' % exclude )
excluded_apps . add ( app_config )
if len ( app_labels ) == 0 :
if primary_keys :
raise CommandError ( "You can only use --pks option with one model" )
app_list = OrderedDict ( ( app_config , None ) for app_config in apps . get_app_configs ( ) if app_config . models_module is not None and app_config not in excluded_apps )
else :
if len ( app_labels ) > 1 and primary_keys :
raise CommandError ( "You can only use --pks option with one model" )
app_list = OrderedDict ( )
for label in app_labels :
try :
app_label , model_label = label . split ( '.' )
try :
app_config = apps . get_app_config ( app_label )
except LookupError :
raise CommandError ( "Unknown application: %s" % app_label )
if app_config . models_module is None or app_config in excluded_apps :
continue
try :
model = app_config . get_model ( model_label )
except LookupError :
raise CommandError ( "Unknown model: %s.%s" % ( app_label , model_label ) )
app_list_value = app_list . setdefault ( app_config , [ ] )
if app_list_value is not None :
if model not in app_list_value :
app_list_value . append ( model )
except ValueError :
if primary_keys :
raise CommandError ( "You can only use --pks option with one model" )
app_label = label
try :
app_config = apps . get_app_config ( app_label )
except LookupError :
raise CommandError ( "Unknown application: %s" % app_label )
if app_config . models_module is None or app_config in excluded_apps :
continue
app_list [ app_config ] = None
if format not in serializers . get_public_serializer_formats ( ) :
try :
serializers . get_serializer ( format )
except serializers . SerializerDoesNotExist :
pass
raise CommandError ( "Unknown serialization format: %s" % format )
def get_objects ( ) :
for model in sort_dependencies ( app_list . items ( ) ) :
if model in excluded_models :
continue
if not model . _meta . proxy and router . allow_migrate ( using , model ) :
if use_base_manager :
objects = model . _base_manager
else :
objects = model . _default_manager
queryset = objects . using ( using ) . order_by ( model . _meta . pk . name )
if primary_keys :
queryset = queryset . filter ( pk__in = primary_keys )
for obj in queryset . iterator ( ) :
yield obj
try :
self . stdout . ending = None
stream = open ( output , 'w' ) if output else None
try :
serializers . serialize ( format , get_objects ( ) , indent = indent , use_natural_foreign_keys = use_natural_foreign_keys , use_natural_primary_keys = use_natural_primary_keys , stream = stream or self . stdout )
finally :
if stream :
stream . close ( )
except Exception as e :
if show_traceback :
raise
raise CommandError ( "Unable to serialize database: %s" % e )
def sort_dependencies ( app_list ) :
model_dependencies = [ ]
models = set ( )
for app_config , model_list in app_list :
if model_list is None :
model_list = app_config . get_models ( )
for model in model_list :
models . add ( model )
if hasattr ( model , 'natural_key' ) :
deps = getattr ( model . natural_key , 'dependencies' , [ ] )
if deps :
deps = [ apps . get_model ( dep ) for dep in deps ]
else :
deps = [ ]
for field in model . _meta . fields :
if hasattr ( field . rel , 'to' ) :
rel_model = field . rel . to
if hasattr ( rel_model , 'natural_key' ) and rel_model != model :
deps . append ( rel_model )
for field in model . _meta . many_to_many :
if field . rel . through . _meta . auto_created :
rel_model = field . rel . to
if hasattr ( rel_model , 'natural_key' ) and rel_model != model :
deps . append ( rel_model )
model_dependencies . append ( ( model , deps ) )
model_dependencies . reverse ( )
model_list = [ ]
while model_dependencies :
skipped = [ ]
changed = False
while model_dependencies :
model , deps = model_dependencies . pop ( )
found = True
for candidate in ( ( d not in models or d in model_list ) for d in deps ) :
if not candidate :
found = False
if found :
model_list . append ( model )
changed = True
else :
skipped . append ( ( model , deps ) )
if not changed :
raise CommandError ( "Can't resolve dependencies for %s in serialized app list." % ', ' . join ( '%s.%s' % ( model . _meta . app_label , model . _meta . object_name ) for model , deps in sorted ( skipped , key = lambda obj : obj [ 0 ] . __name__ ) ) )
model_dependencies = skipped
return model_list
import sys
from importlib import import_module
from django . apps import apps
from django . db import connections , router , transaction , DEFAULT_DB_ALIAS
from django . core . management import call_command
from django . core . management . base import BaseCommand , CommandError
from django . core . management . color import no_style
from django . core . management . sql import sql_flush , emit_post_migrate_signal
from django . utils . six . moves import input
from django . utils import six
class Command ( BaseCommand ) :
help = ( 'Removes ALL DATA from the database, including data added during ' 'migrations. Unmigrated apps will also have their initial_data ' 'fixture reloaded. Does not achieve a "fresh install" state.' )
def add_arguments ( self , parser ) :
parser . add_argument ( '--noinput' , action = 'store_false' , dest = 'interactive' , default = True , help = 'Tells Django to NOT prompt the user for input of any kind.' )
parser . add_argument ( '--database' , action = 'store' , dest = 'database' , default = DEFAULT_DB_ALIAS , help = 'Nominates a database to flush. Defaults to the "default" database.' )
parser . add_argument ( '--no-initial-data' , action = 'store_false' , dest = 'load_initial_data' , default = True , help = 'Tells Django not to load any initial data after database synchronization.' )
def handle ( self , ** options ) :
database = options . get ( 'database' )
connection = connections [ database ]
verbosity = options . get ( 'verbosity' )
interactive = options . get ( 'interactive' )
reset_sequences = options . get ( 'reset_sequences' , True )
allow_cascade = options . get ( 'allow_cascade' , False )
inhibit_post_migrate = options . get ( 'inhibit_post_migrate' , False )
self . style = no_style ( )
for app_config in apps . get_app_configs ( ) :
try :
import_module ( '.management' , app_config . name )
except ImportError :
pass
sql_list = sql_flush ( self . style , connection , only_django = True , reset_sequences = reset_sequences , allow_cascade = allow_cascade )
if interactive :
confirm = input ( """You have requested a flush of the database. This will IRREVERSIBLY DESTROY all data currently in the %r database, and return each table to an empty state. Are you sure you want to do this? Type 'yes' to continue, or 'no' to cancel: """ % connection . settings_dict [ 'NAME' ] )
else :
confirm = 'yes'
if confirm == 'yes' :
try :
with transaction . atomic ( using = database , savepoint = connection . features . can_rollback_ddl ) :
with connection . cursor ( ) as cursor :
for sql in sql_list :
cursor . execute ( sql )
except Exception as e :
new_msg = ( "Database %s couldn't be flushed. Possible reasons:\n" " * The database isn't running or isn't configured correctly.\n" " * At least one of the expected database tables doesn't exist.\n" " * The SQL was invalid.\n" "Hint: Look at the output of 'django-admin.py sqlflush'. That's the SQL this command wasn't able to run.\n" "The full error: %s" ) % ( connection . settings_dict [ 'NAME' ] , e )
six . reraise ( CommandError , CommandError ( new_msg ) , sys . exc_info ( ) [ 2 ] )
if not inhibit_post_migrate :
self . emit_post_migrate ( verbosity , interactive , database )
if options . get ( 'load_initial_data' ) :
call_command ( 'loaddata' , 'initial_data' , ** options )
else :
self . stdout . write ( "Flush cancelled.\n" )
@ staticmethod
def emit_post_migrate ( verbosity , interactive , database ) :
all_models = [ ]
for app_config in apps . get_app_configs ( ) :
all_models . extend ( router . get_migratable_models ( app_config , database , include_auto_created = True ) )
emit_post_migrate_signal ( set ( all_models ) , verbosity , interactive , database )
from __future__ import unicode_literals
from collections import OrderedDict
import keyword
import re
from django . core . management . base import BaseCommand , CommandError
from django . db import connections , DEFAULT_DB_ALIAS
class Command ( BaseCommand ) :
help = "Introspects the database tables in the given database and outputs a Django model module."
requires_system_checks = False
db_module = 'django.db'
def add_arguments ( self , parser ) :
parser . add_argument ( '--database' , action = 'store' , dest = 'database' , default = DEFAULT_DB_ALIAS , help = 'Nominates a database to ' 'introspect. Defaults to using the "default" database.' )
def handle ( self , ** options ) :
try :
for line in self . handle_inspection ( options ) :
self . stdout . write ( "%s\n" % line )
except NotImplementedError :
raise CommandError ( "Database inspection isn't supported for the currently selected database backend." )
def handle_inspection ( self , options ) :
connection = connections [ options [ 'database' ] ]
table_name_filter = options . get ( 'table_name_filter' )
table2model = lambda table_name : re . sub ( r'[^a-zA-Z0-9]' , '' , table_name . title ( ) )
strip_prefix = lambda s : s [ 1 : ] if s . startswith ( "u'" ) else s
with connection . cursor ( ) as cursor :
yield "# This is an auto-generated Django model module."
yield "# You'll have to do the following manually to clean this up:"
yield "# * Rearrange models' order"
yield "# * Make sure each model has one field with primary_key=True"
yield "# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table"
yield "# Feel free to rename the models, but don't rename db_table values or field names."
yield "#"
yield "# Also note: You'll have to insert the output of 'django-admin.py sqlcustom [app_label]'"
yield "# into your database."
yield "from __future__ import unicode_literals"
yield ''
yield 'from %s import models' % self . db_module
known_models = [ ]
for table_name in connection . introspection . table_names ( cursor ) :
if table_name_filter is not None and callable ( table_name_filter ) :
if not table_name_filter ( table_name ) :
continue
yield ''
yield ''
yield 'class %s(models.Model):' % table2model ( table_name )
known_models . append ( table2model ( table_name ) )
try :
relations = connection . introspection . get_relations ( cursor , table_name )
except NotImplementedError :
relations = { }
try :
indexes = connection . introspection . get_indexes ( cursor , table_name )
except NotImplementedError :
indexes = { }
used_column_names = [ ]
for i , row in enumerate ( connection . introspection . get_table_description ( cursor , table_name ) ) :
comment_notes = [ ]
extra_params = OrderedDict ( )
column_name = row [ 0 ]
is_relation = i in relations
att_name , params , notes = self . normalize_col_name ( column_name , used_column_names , is_relation )
extra_params . update ( params )
comment_notes . extend ( notes )
used_column_names . append ( att_name )
if column_name in indexes :
if indexes [ column_name ] [ 'primary_key' ] :
extra_params [ 'primary_key' ] = True
elif indexes [ column_name ] [ 'unique' ] :
extra_params [ 'unique' ] = True
if is_relation :
rel_to = "self" if relations [ i ] [ 1 ] == table_name else table2model ( relations [ i ] [ 1 ] )
if rel_to in known_models :
field_type = 'ForeignKey(%s' % rel_to
else :
field_type = "ForeignKey('%s'" % rel_to
else :
field_type , field_params , field_notes = self . get_field_type ( connection , table_name , row )
extra_params . update ( field_params )
comment_notes . extend ( field_notes )
field_type += '('
if att_name == 'id' and extra_params == { 'primary_key' : True } :
if field_type == 'AutoField(' :
continue
elif field_type == 'IntegerField(' and not connection . features . can_introspect_autofield :
comment_notes . append ( 'AutoField?' )
if row [ 6 ] :
if field_type == 'BooleanField(' :
field_type = 'NullBooleanField('
else :
extra_params [ 'blank' ] = True
if field_type not in ( 'TextField(' , 'CharField(' ) :
extra_params [ 'null' ] = True
field_desc = '%s = %s%s' % ( att_name , '' if '.' in field_type else 'models.' , field_type , )
if extra_params :
if not field_desc . endswith ( '(' ) :
field_desc += ', '
field_desc += ', ' . join ( [ '%s=%s' % ( k , strip_prefix ( repr ( v ) ) ) for k , v in extra_params . items ( ) ] )
field_desc += ')'
if comment_notes :
field_desc += ' # ' + ' ' . join ( comment_notes )
yield ' %s' % field_desc
for meta_line in self . get_meta ( table_name ) :
yield meta_line
def normalize_col_name ( self , col_name , used_column_names , is_relation ) :
field_params = { }
field_notes = [ ]
new_name = col_name . lower ( )
if new_name != col_name :
field_notes . append ( 'Field name made lowercase.' )
if is_relation :
if new_name . endswith ( '_id' ) :
new_name = new_name [ : - 3 ]
else :
field_params [ 'db_column' ] = col_name
new_name , num_repl = re . subn ( r'\W' , '_' , new_name )
if num_repl > 0 :
field_notes . append ( 'Field renamed to remove unsuitable characters.' )
if new_name . find ( '__' ) >= 0 :
while new_name . find ( '__' ) >= 0 :
new_name = new_name . replace ( '__' , '_' )
if col_name . lower ( ) . find ( '__' ) >= 0 :
field_notes . append ( "Field renamed because it contained more than one '_' in a row." )
if new_name . startswith ( '_' ) :
new_name = 'field%s' % new_name
field_notes . append ( "Field renamed because it started with '_'." )
if new_name . endswith ( '_' ) :
new_name = '%sfield' % new_name
field_notes . append ( "Field renamed because it ended with '_'." )
if keyword . iskeyword ( new_name ) :
new_name += '_field'
field_notes . append ( 'Field renamed because it was a Python reserved word.' )
if new_name [ 0 ] . isdigit ( ) :
new_name = 'number_%s' % new_name
field_notes . append ( "Field renamed because it wasn't a valid Python identifier." )
if new_name in used_column_names :
num = 0
while '%s_%d' % ( new_name , num ) in used_column_names :
num += 1
new_name = '%s_%d' % ( new_name , num )
field_notes . append ( 'Field renamed because of name conflict.' )
if col_name != new_name and field_notes :
field_params [ 'db_column' ] = col_name
return new_name , field_params , field_notes
def get_field_type ( self , connection , table_name , row ) :
field_params = OrderedDict ( )
field_notes = [ ]
try :
field_type = connection . introspection . get_field_type ( row [ 1 ] , row )
except KeyError :
field_type = 'TextField'
field_notes . append ( 'This field type is a guess.' )
if type ( field_type ) is tuple :
field_type , new_params = field_type
field_params . update ( new_params )
if field_type == 'CharField' and row [ 3 ] :
field_params [ 'max_length' ] = int ( row [ 3 ] )
if field_type == 'DecimalField' :
if row [ 4 ] is None or row [ 5 ] is None :
field_notes . append ( 'max_digits and decimal_places have been guessed, as this ' 'database handles decimal fields as float' )
field_params [ 'max_digits' ] = row [ 4 ] if row [ 4 ] is not None else 10
field_params [ 'decimal_places' ] = row [ 5 ] if row [ 5 ] is not None else 5
else :
field_params [ 'max_digits' ] = row [ 4 ]
field_params [ 'decimal_places' ] = row [ 5 ]
return field_type , field_params , field_notes
def get_meta ( self , table_name ) :
return [ "" , " class Meta:" , " managed = False" , " db_table = '%s'" % table_name ]
from __future__ import unicode_literals
import glob
import gzip
import os
import warnings
import zipfile
from django . apps import apps
from django . conf import settings
from django . core import serializers
from django . core . management . base import BaseCommand , CommandError
from django . core . management . color import no_style
from django . db import ( connections , router , transaction , DEFAULT_DB_ALIAS , IntegrityError , DatabaseError )
from django . utils import lru_cache
from django . utils . encoding import force_text
from django . utils . functional import cached_property
from django . utils . _os import upath
from itertools import product
try :
import bz2
has_bz2 = True
except ImportError :
has_bz2 = False
class Command ( BaseCommand ) :
help = 'Installs the named fixture(s) in the database.'
missing_args_message = ( "No database fixture specified. Please provide the " "path of at least one fixture in the command line." )
def add_arguments ( self , parser ) :
parser . add_argument ( 'args' , metavar = 'fixture' , nargs = '+' , help = 'Fixture labels.' )
parser . add_argument ( '--database' , action = 'store' , dest = 'database' , default = DEFAULT_DB_ALIAS , help = 'Nominates a specific database to load ' 'fixtures into. Defaults to the "default" database.' )
parser . add_argument ( '--app' , action = 'store' , dest = 'app_label' , default = None , help = 'Only look for fixtures in the specified app.' )
parser . add_argument ( '--ignorenonexistent' , '-i' , action = 'store_true' , dest = 'ignore' , default = False , help = 'Ignores entries in the serialized data for fields that do not ' 'currently exist on the model.' )
def handle ( self , * fixture_labels , ** options ) :
self . ignore = options . get ( 'ignore' )
self . using = options . get ( 'database' )
self . app_label = options . get ( 'app_label' )
self . hide_empty = options . get ( 'hide_empty' , False )
self . verbosity = options . get ( 'verbosity' )
with transaction . atomic ( using = self . using ) :
self . loaddata ( fixture_labels )
if transaction . get_autocommit ( self . using ) :
connections [ self . using ] . close ( )
def loaddata ( self , fixture_labels ) :
connection = connections [ self . using ]
self . fixture_count = 0
self . loaded_object_count = 0
self . fixture_object_count = 0
self . models = set ( )
self . serialization_formats = serializers . get_public_serializer_formats ( )
self . compression_formats = { None : ( open , 'rb' ) , 'gz' : ( gzip . GzipFile , 'rb' ) , 'zip' : ( SingleZipReader , 'r' ) , }
if has_bz2 :
self . compression_formats [ 'bz2' ] = ( bz2 . BZ2File , 'r' )
with connection . constraint_checks_disabled ( ) :
for fixture_label in fixture_labels :
self . load_label ( fixture_label )
table_names = [ model . _meta . db_table for model in self . models ]
try :
connection . check_constraints ( table_names = table_names )
except Exception as e :
e . args = ( "Problem installing fixtures: %s" % e , )
raise
if self . loaded_object_count > 0 :
sequence_sql = connection . ops . sequence_reset_sql ( no_style ( ) , self . models )
if sequence_sql :
if self . verbosity >= 2 :
self . stdout . write ( "Resetting sequences\n" )
with connection . cursor ( ) as cursor :
for line in sequence_sql :
cursor . execute ( line )
if self . verbosity >= 1 :
if self . fixture_count == 0 and self . hide_empty :
pass
elif self . fixture_object_count == self . loaded_object_count :
self . stdout . write ( "Installed %d object(s) from %d fixture(s)" % ( self . loaded_object_count , self . fixture_count ) )
else :
self . stdout . write ( "Installed %d object(s) (of %d) from %d fixture(s)" % ( self . loaded_object_count , self . fixture_object_count , self . fixture_count ) )
def load_label ( self , fixture_label ) :
for fixture_file , fixture_dir , fixture_name in self . find_fixtures ( fixture_label ) :
_ , ser_fmt , cmp_fmt = self . parse_name ( os . path . basename ( fixture_file ) )
open_method , mode = self . compression_formats [ cmp_fmt ]
fixture = open_method ( fixture_file , mode )
try :
self . fixture_count += 1
objects_in_fixture = 0
loaded_objects_in_fixture = 0
if self . verbosity >= 2 :
self . stdout . write ( "Installing %s fixture '%s' from %s." % ( ser_fmt , fixture_name , humanize ( fixture_dir ) ) )
objects = serializers . deserialize ( ser_fmt , fixture , using = self . using , ignorenonexistent = self . ignore )
for obj in objects :
objects_in_fixture += 1
if router . allow_migrate ( self . using , obj . object . __class__ ) :
loaded_objects_in_fixture += 1
self . models . add ( obj . object . __class__ )
try :
obj . save ( using = self . using )
except ( DatabaseError , IntegrityError ) as e :
e . args = ( "Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % { 'app_label' : obj . object . _meta . app_label , 'object_name' : obj . object . _meta . object_name , 'pk' : obj . object . pk , 'error_msg' : force_text ( e ) } , )
raise
self . loaded_object_count += loaded_objects_in_fixture
self . fixture_object_count += objects_in_fixture
except Exception as e :
if not isinstance ( e , CommandError ) :
e . args = ( "Problem installing fixture '%s': %s" % ( fixture_file , e ) , )
raise
finally :
fixture . close ( )
if objects_in_fixture == 0 :
warnings . warn ( "No fixture data found for '%s'. (File format may be " "invalid.)" % fixture_name , RuntimeWarning )
@ lru_cache . lru_cache ( maxsize = None )
def find_fixtures ( self , fixture_label ) :
fixture_name , ser_fmt , cmp_fmt = self . parse_name ( fixture_label )
databases = [ self . using , None ]
cmp_fmts = list ( self . compression_formats . keys ( ) ) if cmp_fmt is None else [ cmp_fmt ]
ser_fmts = serializers . get_public_serializer_formats ( ) if ser_fmt is None else [ ser_fmt ]
if self . verbosity >= 2 :
self . stdout . write ( "Loading '%s' fixtures..." % fixture_name )
if os . path . isabs ( fixture_name ) :
fixture_dirs = [ os . path . dirname ( fixture_name ) ]
fixture_name = os . path . basename ( fixture_name )
else :
fixture_dirs = self . fixture_dirs
if os . path . sep in fixture_name :
fixture_dirs = [ os . path . join ( dir_ , os . path . dirname ( fixture_name ) ) for dir_ in fixture_dirs ]
fixture_name = os . path . basename ( fixture_name )
suffixes = ( '.' . join ( ext for ext in combo if ext ) for combo in product ( databases , ser_fmts , cmp_fmts ) )
targets = set ( '.' . join ( ( fixture_name , suffix ) ) for suffix in suffixes )
fixture_files = [ ]
for fixture_dir in fixture_dirs :
if self . verbosity >= 2 :
self . stdout . write ( "Checking %s for fixtures..." % humanize ( fixture_dir ) )
fixture_files_in_dir = [ ]
for candidate in glob . iglob ( os . path . join ( fixture_dir , fixture_name + '*' ) ) :
if os . path . basename ( candidate ) in targets :
fixture_files_in_dir . append ( ( candidate , fixture_dir , fixture_name ) )
if self . verbosity >= 2 and not fixture_files_in_dir :
self . stdout . write ( "No fixture '%s' in %s." % ( fixture_name , humanize ( fixture_dir ) ) )
if len ( fixture_files_in_dir ) > 1 :
raise CommandError ( "Multiple fixtures named '%s' in %s. Aborting." % ( fixture_name , humanize ( fixture_dir ) ) )
fixture_files . extend ( fixture_files_in_dir )
if fixture_name != 'initial_data' and not fixture_files :
warnings . warn ( "No fixture named '%s' found." % fixture_name )
return fixture_files
@ cached_property
def fixture_dirs ( self ) :
dirs = [ ]
for app_config in apps . get_app_configs ( ) :
if self . app_label and app_config . label != self . app_label :
continue
app_dir = os . path . join ( app_config . path , 'fixtures' )
if os . path . isdir ( app_dir ) :
dirs . append ( app_dir )
dirs . extend ( list ( settings . FIXTURE_DIRS ) )
dirs . append ( '' )
dirs = [ upath ( os . path . abspath ( os . path . realpath ( d ) ) ) for d in dirs ]
return dirs
def parse_name ( self , fixture_name ) :
parts = fixture_name . rsplit ( '.' , 2 )
if len ( parts ) > 1 and parts [ - 1 ] in self . compression_formats :
cmp_fmt = parts [ - 1 ]
parts = parts [ : - 1 ]
else :
cmp_fmt = None
if len ( parts ) > 1 :
if parts [ - 1 ] in self . serialization_formats :
ser_fmt = parts [ - 1 ]
parts = parts [ : - 1 ]
else :
raise CommandError ( "Problem installing fixture '%s': %s is not a known " "serialization format." % ( '' . join ( parts [ : - 1 ] ) , parts [ - 1 ] ) )
else :
ser_fmt = None
name = '.' . join ( parts )
return name , ser_fmt , cmp_fmt
class SingleZipReader ( zipfile . ZipFile ) :
def __init__ ( self , * args , ** kwargs ) :
zipfile . ZipFile . __init__ ( self , * args , ** kwargs )
if len ( self . namelist ( ) ) != 1 :
raise ValueError ( "Zip-compressed fixtures must contain one file." )
def read ( self ) :
return zipfile . ZipFile . read ( self , self . namelist ( ) [ 0 ] )
def humanize ( dirname ) :
return "'%s'" % dirname if dirname else 'absolute path'
from __future__ import unicode_literals
import fnmatch
import glob
import io
import os
import re
import sys
from itertools import dropwhile
import django
from django . core . management . base import CommandError , BaseCommand
from django . core . management . utils import ( handle_extensions , find_command , popen_wrapper )
from django . utils . encoding import force_str
from django . utils . functional import total_ordering
from django . utils import six
from django . utils . text import get_text_list
from django . utils . jslex import prepare_js_for_gettext
plural_forms_re = re . compile ( r'^(?P"Plural-Forms.+?\\n")\s*$' , re . MULTILINE | re . DOTALL )
STATUS_OK = 0
def check_programs ( * programs ) :
for program in programs :
if find_command ( program ) is None :
raise CommandError ( "Can't find %s. Make sure you have GNU " "gettext tools 0.15 or newer installed." % program )
@ total_ordering
class TranslatableFile ( object ) :
def __init__ ( self , dirpath , file_name , locale_dir ) :
self . file = file_name
self . dirpath = dirpath
self . locale_dir = locale_dir
def __repr__ ( self ) :
return "" % os . sep . join ( [ self . dirpath , self . file ] )
def __eq__ ( self , other ) :
return self . path == other . path
def __lt__ ( self , other ) :
return self . path < other . path
@ property
def path ( self ) :
return os . path . join ( self . dirpath , self . file )
def process ( self , command , domain ) :
from django . conf import settings
from django . utils . translation import templatize
if command . verbosity > 1 :
command . stdout . write ( 'processing file %s in %s\n' % ( self . file , self . dirpath ) )
_ , file_ext = os . path . splitext ( self . file )
if domain == 'djangojs' and file_ext in command . extensions :
is_templatized = True
orig_file = os . path . join ( self . dirpath , self . file )
with io . open ( orig_file , encoding = settings . FILE_CHARSET ) as fp :
src_data = fp . read ( )
src_data = prepare_js_for_gettext ( src_data )
thefile = '%s.c' % self . file
work_file = os . path . join ( self . dirpath , thefile )
with io . open ( work_file , "w" , encoding = 'utf-8' ) as fp :
fp . write ( src_data )
args = [ 'xgettext' , '-d' , domain , '--language=C' , '--keyword=gettext_noop' , '--keyword=gettext_lazy' , '--keyword=ngettext_lazy:1,2' , '--keyword=pgettext:1c,2' , '--keyword=npgettext:1c,2,3' , '--output=-' ] + command . xgettext_options
args . append ( work_file )
elif domain == 'django' and ( file_ext == '.py' or file_ext in command . extensions ) :
thefile = self . file
orig_file = os . path . join ( self . dirpath , self . file )
is_templatized = file_ext in command . extensions
if is_templatized :
with io . open ( orig_file , 'r' , encoding = settings . FILE_CHARSET ) as fp :
src_data = fp . read ( )
thefile = '%s.py' % self . file
content = templatize ( src_data , orig_file [ 2 : ] )
with io . open ( os . path . join ( self . dirpath , thefile ) , "w" , encoding = 'utf-8' ) as fp :
fp . write ( content )
work_file = os . path . join ( self . dirpath , thefile )
args = [ 'xgettext' , '-d' , domain , '--language=Python' , '--keyword=gettext_noop' , '--keyword=gettext_lazy' , '--keyword=ngettext_lazy:1,2' , '--keyword=ugettext_noop' , '--keyword=ugettext_lazy' , '--keyword=ungettext_lazy:1,2' , '--keyword=pgettext:1c,2' , '--keyword=npgettext:1c,2,3' , '--keyword=pgettext_lazy:1c,2' , '--keyword=npgettext_lazy:1c,2,3' , '--output=-' ] + command . xgettext_options
args . append ( work_file )
else :
return
msgs , errors , status = popen_wrapper ( args )
if errors :
if status != STATUS_OK :
if is_templatized :
os . unlink ( work_file )
raise CommandError ( "errors happened while running xgettext on %s\n%s" % ( self . file , errors ) )
elif command . verbosity > 0 :
command . stdout . write ( errors )
if msgs :
if six . PY2 :
msgs = msgs . decode ( 'utf-8' )
potfile = os . path . join ( self . locale_dir , '%s.pot' % str ( domain ) )
if is_templatized :
if os . name == 'nt' :
old = '#: ' + work_file
new = '#: ' + orig_file
else :
old = '#: ' + work_file [ 2 : ]
new = '#: ' + orig_file [ 2 : ]
msgs = msgs . replace ( old , new )
write_pot_file ( potfile , msgs )
if is_templatized :
os . unlink ( work_file )
def write_pot_file ( potfile , msgs ) :
if os . path . exists ( potfile ) :
msgs = '\n' . join ( dropwhile ( len , msgs . split ( '\n' ) ) )
else :
msgs = msgs . replace ( 'charset=CHARSET' , 'charset=UTF-8' )
with io . open ( potfile , 'a' , encoding = 'utf-8' ) as fp :
fp . write ( msgs )
class Command ( BaseCommand ) :
help = ( "Runs over the entire source tree of the current directory and " "pulls out all strings marked for translation. It creates (or updates) a message " "file in the conf/locale (in the django tree) or locale (for projects and " "applications) directory.\n\nYou must run this command with one of either the " "--locale, --exclude or --all options." )
requires_system_checks = False
leave_locale_alone = True
msgmerge_options = [ '-q' , '--previous' ]
msguniq_options = [ '--to-code=utf-8' ]
msgattrib_options = [ '--no-obsolete' ]
xgettext_options = [ '--from-code=UTF-8' , '--add-comments=Translators' ]
def add_arguments ( self , parser ) :
parser . add_argument ( '--locale' , '-l' , default = [ ] , dest = 'locale' , action = 'append' , help = 'Creates or updates the message files for the given locale(s) (e.g. pt_BR). ' 'Can be used multiple times.' )
parser . add_argument ( '--exclude' , '-x' , default = [ ] , dest = 'exclude' , action = 'append' , help = 'Locales to exclude. Default is none. Can be used multiple times.' )
parser . add_argument ( '--domain' , '-d' , default = 'django' , dest = 'domain' , help = 'The domain of the message files (default: "django").' )
parser . add_argument ( '--all' , '-a' , action = 'store_true' , dest = 'all' , default = False , help = 'Updates the message files for all existing locales.' )
parser . add_argument ( '--extension' , '-e' , dest = 'extensions' , help = 'The file extension(s) to examine (default: "html,txt", or "js" ' 'if the domain is "djangojs"). Separate multiple extensions with ' 'commas, or use -e multiple times.' , action = 'append' )
parser . add_argument ( '--symlinks' , '-s' , action = 'store_true' , dest = 'symlinks' , default = False , help = 'Follows symlinks to directories when examining ' 'source code and templates for translation strings.' )
parser . add_argument ( '--ignore' , '-i' , action = 'append' , dest = 'ignore_patterns' , default = [ ] , metavar = 'PATTERN' , help = 'Ignore files or directories matching this glob-style pattern. ' 'Use multiple times to ignore more.' )
parser . add_argument ( '--no-default-ignore' , action = 'store_false' , dest = 'use_default_ignore_patterns' , default = True , help = "Don't ignore the common glob-style patterns 'CVS', '.*', '*~' and '*.pyc'." )
parser . add_argument ( '--no-wrap' , action = 'store_true' , dest = 'no_wrap' , default = False , help = "Don't break long message lines into several lines." )
parser . add_argument ( '--no-location' , action = 'store_true' , dest = 'no_location' , default = False , help = "Don't write '#: filename:line' lines." )
parser . add_argument ( '--no-obsolete' , action = 'store_true' , dest = 'no_obsolete' , default = False , help = "Remove obsolete message strings." )
parser . add_argument ( '--keep-pot' , action = 'store_true' , dest = 'keep_pot' , default = False , help = "Keep .pot file after making messages. Useful when debugging." )
def handle ( self , * args , ** options ) :
locale = options . get ( 'locale' )
exclude = options . get ( 'exclude' )
self . domain = options . get ( 'domain' )
self . verbosity = options . get ( 'verbosity' )
process_all = options . get ( 'all' )
extensions = options . get ( 'extensions' )
self . symlinks = options . get ( 'symlinks' )
ignore_patterns = options . get ( 'ignore_patterns' )
if options . get ( 'use_default_ignore_patterns' ) :
ignore_patterns += [ 'CVS' , '.*' , '*~' , '*.pyc' ]
self . ignore_patterns = list ( set ( ignore_patterns ) )
if options . get ( 'no_wrap' ) :
self . msgmerge_options = self . msgmerge_options [ : ] + [ '--no-wrap' ]
self . msguniq_options = self . msguniq_options [ : ] + [ '--no-wrap' ]
self . msgattrib_options = self . msgattrib_options [ : ] + [ '--no-wrap' ]
self . xgettext_options = self . xgettext_options [ : ] + [ '--no-wrap' ]
if options . get ( 'no_location' ) :
self . msgmerge_options = self . msgmerge_options [ : ] + [ '--no-location' ]
self . msguniq_options = self . msguniq_options [ : ] + [ '--no-location' ]
self . msgattrib_options = self . msgattrib_options [ : ] + [ '--no-location' ]
self . xgettext_options = self . xgettext_options [ : ] + [ '--no-location' ]
self . no_obsolete = options . get ( 'no_obsolete' )
self . keep_pot = options . get ( 'keep_pot' )
if self . domain not in ( 'django' , 'djangojs' ) :
raise CommandError ( "currently makemessages only supports domains " "'django' and 'djangojs'" )
if self . domain == 'djangojs' :
exts = extensions if extensions else [ 'js' ]
else :
exts = extensions if extensions else [ 'html' , 'txt' ]
self . extensions = handle_extensions ( exts )
if ( locale is None and not exclude and not process_all ) or self . domain is None :
raise CommandError ( "Type '%s help %s' for usage information." % ( os . path . basename ( sys . argv [ 0 ] ) , sys . argv [ 1 ] ) )
from django . conf import settings
if settings . configured :
settings . USE_I18N = True
else :
settings . configure ( USE_I18N = True )
if self . verbosity > 1 :
self . stdout . write ( 'examining files with the extensions: %s\n' % get_text_list ( list ( self . extensions ) , 'and' ) )
self . invoked_for_django = False
self . locale_paths = [ ]
self . default_locale_path = None
if os . path . isdir ( os . path . join ( 'conf' , 'locale' ) ) :
self . locale_paths = [ os . path . abspath ( os . path . join ( 'conf' , 'locale' ) ) ]
self . default_locale_path = self . locale_paths [ 0 ]
self . invoked_for_django = True
else :
self . locale_paths . extend ( list ( settings . LOCALE_PATHS ) )
if os . path . isdir ( 'locale' ) :
self . locale_paths . append ( os . path . abspath ( 'locale' ) )
if self . locale_paths :
self . default_locale_path = self . locale_paths [ 0 ]
if not os . path . exists ( self . default_locale_path ) :
os . makedirs ( self . default_locale_path )
locale_dirs = filter ( os . path . isdir , glob . glob ( '%s/*' % self . default_locale_path ) )
all_locales = map ( os . path . basename , locale_dirs )
if process_all :
locales = all_locales
else :
locales = locale or all_locales
locales = set ( locales ) - set ( exclude )
if locales :
check_programs ( 'msguniq' , 'msgmerge' , 'msgattrib' )
check_programs ( 'xgettext' )
try :
potfiles = self . build_potfiles ( )
for locale in locales :
if self . verbosity > 0 :
self . stdout . write ( "processing locale %s\n" % locale )
for potfile in potfiles :
self . write_po_file ( potfile , locale )
finally :
if not self . keep_pot :
self . remove_potfiles ( )
def build_potfiles ( self ) :
file_list = self . find_files ( "." )
self . remove_potfiles ( )
for f in file_list :
try :
f . process ( self , self . domain )
except UnicodeDecodeError :
self . stdout . write ( "UnicodeDecodeError: skipped file %s in %s" % ( f . file , f . dirpath ) )
potfiles = [ ]
for path in self . locale_paths :
potfile = os . path . join ( path , '%s.pot' % str ( self . domain ) )
if not os . path . exists ( potfile ) :
continue
args = [ 'msguniq' ] + self . msguniq_options + [ potfile ]
msgs , errors , status = popen_wrapper ( args )
if six . PY2 :
msgs = msgs . decode ( 'utf-8' )
if errors :
if status != STATUS_OK :
raise CommandError ( "errors happened while running msguniq\n%s" % errors )
elif self . verbosity > 0 :
self . stdout . write ( errors )
with io . open ( potfile , 'w' , encoding = 'utf-8' ) as fp :
fp . write ( msgs )
potfiles . append ( potfile )
return potfiles
def remove_potfiles ( self ) :
for path in self . locale_paths :
pot_path = os . path . join ( path , '%s.pot' % str ( self . domain ) )
if os . path . exists ( pot_path ) :
os . unlink ( pot_path )
def find_files ( self , root ) :
def is_ignored ( path , ignore_patterns ) :
filename = os . path . basename ( path )
ignore = lambda pattern : fnmatch . fnmatchcase ( filename , pattern )
return any ( ignore ( pattern ) for pattern in ignore_patterns )
dir_suffix = '%s*' % os . sep
norm_patterns = [ p [ : - len ( dir_suffix ) ] if p . endswith ( dir_suffix ) else p for p in self . ignore_patterns ]
all_files = [ ]
for dirpath , dirnames , filenames in os . walk ( root , topdown = True , followlinks = self . symlinks ) :
for dirname in dirnames [ : ] :
if is_ignored ( os . path . normpath ( os . path . join ( dirpath , dirname ) ) , norm_patterns ) :
dirnames . remove ( dirname )
if self . verbosity > 1 :
self . stdout . write ( 'ignoring directory %s\n' % dirname )
elif dirname == 'locale' :
dirnames . remove ( dirname )
self . locale_paths . insert ( 0 , os . path . join ( os . path . abspath ( dirpath ) , dirname ) )
for filename in filenames :
file_path = os . path . normpath ( os . path . join ( dirpath , filename ) )
if is_ignored ( file_path , self . ignore_patterns ) :
if self . verbosity > 1 :
self . stdout . write ( 'ignoring file %s in %s\n' % ( filename , dirpath ) )
else :
locale_dir = None
for path in self . locale_paths :
if os . path . abspath ( dirpath ) . startswith ( os . path . dirname ( path ) ) :
locale_dir = path
break
if not locale_dir :
locale_dir = self . default_locale_path
if not locale_dir :
raise CommandError ( "Unable to find a locale path to store translations for file %s" % file_path )
all_files . append ( TranslatableFile ( dirpath , filename , locale_dir ) )
return sorted ( all_files )
def write_po_file ( self , potfile , locale ) :
basedir = os . path . join ( os . path . dirname ( potfile ) , locale , 'LC_MESSAGES' )
if not os . path . isdir ( basedir ) :
os . makedirs ( basedir )
pofile = os . path . join ( basedir , '%s.po' % str ( self . domain ) )
if os . path . exists ( pofile ) :
args = [ 'msgmerge' ] + self . msgmerge_options + [ pofile , potfile ]
msgs , errors , status = popen_wrapper ( args )
if six . PY2 :
msgs = msgs . decode ( 'utf-8' )
if errors :
if status != STATUS_OK :
raise CommandError ( "errors happened while running msgmerge\n%s" % errors )
elif self . verbosity > 0 :
self . stdout . write ( errors )
else :
with io . open ( potfile , 'r' , encoding = 'utf-8' ) as fp :
msgs = fp . read ( )
if not self . invoked_for_django :
msgs = self . copy_plural_forms ( msgs , locale )
msgs = msgs . replace ( "#. #-#-#-#-# %s.pot (PACKAGE VERSION) #-#-#-#-#\n" % self . domain , "" )
with io . open ( pofile , 'w' , encoding = 'utf-8' ) as fp :
fp . write ( msgs )
if self . no_obsolete :
args = [ 'msgattrib' ] + self . msgattrib_options + [ '-o' , pofile , pofile ]
msgs , errors , status = popen_wrapper ( args )
if errors :
if status != STATUS_OK :
raise CommandError ( "errors happened while running msgattrib\n%s" % errors )
elif self . verbosity > 0 :
self . stdout . write ( errors )
def copy_plural_forms ( self , msgs , locale ) :
django_dir = os . path . normpath ( os . path . join ( os . path . dirname ( django . __file__ ) ) )
if self . domain == 'djangojs' :
domains = ( 'djangojs' , 'django' )
else :
domains = ( 'django' , )
for domain in domains :
django_po = os . path . join ( django_dir , 'conf' , 'locale' , locale , 'LC_MESSAGES' , '%s.po' % domain )
if os . path . exists ( django_po ) :
with io . open ( django_po , 'r' , encoding = 'utf-8' ) as fp :
m = plural_forms_re . search ( fp . read ( ) )
if m :
plural_form_line = force_str ( m . group ( 'value' ) )
if self . verbosity > 1 :
self . stdout . write ( "copying plural forms: %s\n" % plural_form_line )
lines = [ ]
found = False
for line in msgs . split ( '\n' ) :
if not found and ( not line or plural_forms_re . search ( line ) ) :
line = '%s\n' % plural_form_line
found = True
lines . append ( line )
msgs = '\n' . join ( lines )
break
return msgs
import sys
import os
import operator
from django . apps import apps
from django . core . management . base import BaseCommand , CommandError
from django . db . migrations import Migration
from django . db . migrations . loader import MigrationLoader
from django . db . migrations . autodetector import MigrationAutodetector
from django . db . migrations . questioner import MigrationQuestioner , InteractiveMigrationQuestioner
from django . db . migrations . state import ProjectState
from django . db . migrations . writer import MigrationWriter
from django . utils . six . moves import reduce
class Command ( BaseCommand ) :
help = "Creates new migration(s) for apps."
def add_arguments ( self , parser ) :
parser . add_argument ( 'args' , metavar = 'app_label' , nargs = '*' , help = 'Specify the app label(s) to create migrations for.' )
parser . add_argument ( '--dry-run' , action = 'store_true' , dest = 'dry_run' , default = False , help = "Just show what migrations would be made; don't actually write them." )
parser . add_argument ( '--merge' , action = 'store_true' , dest = 'merge' , default = False , help = "Enable fixing of migration conflicts." )
parser . add_argument ( '--empty' , action = 'store_true' , dest = 'empty' , default = False , help = "Create an empty migration." )
parser . add_argument ( '--noinput' , action = 'store_false' , dest = 'interactive' , default = True , help = 'Tells Django to NOT prompt the user for input of any kind.' )
def handle ( self , * app_labels , ** options ) :
self . verbosity = options . get ( 'verbosity' )
self . interactive = options . get ( 'interactive' )
self . dry_run = options . get ( 'dry_run' , False )
self . merge = options . get ( 'merge' , False )
self . empty = options . get ( 'empty' , False )
app_labels = set ( app_labels )
bad_app_labels = set ( )
for app_label in app_labels :
try :
apps . get_app_config ( app_label )
except LookupError :
bad_app_labels . add ( app_label )
if bad_app_labels :
for app_label in bad_app_labels :
self . stderr . write ( "App '%s' could not be found. Is it in INSTALLED_APPS?" % app_label )
sys . exit ( 2 )
loader = MigrationLoader ( None , ignore_no_migrations = True )
conflicts = loader . detect_conflicts ( )
if conflicts and not self . merge :
name_str = "; " . join ( "%s in %s" % ( ", " . join ( names ) , app ) for app , names in conflicts . items ( ) )
raise CommandError ( "Conflicting migrations detected (%s).\nTo fix them run 'python manage.py makemigrations --merge'" % name_str )
if self . merge and not conflicts :
self . stdout . write ( "No conflicts detected to merge." )
return
if self . merge and conflicts :
return self . handle_merge ( loader , conflicts )
autodetector = MigrationAutodetector ( loader . project_state ( ) , ProjectState . from_apps ( apps ) , InteractiveMigrationQuestioner ( specified_apps = app_labels , dry_run = self . dry_run ) , )
if self . empty :
if not app_labels :
raise CommandError ( "You must supply at least one app label when using --empty." )
changes = dict ( ( app , [ Migration ( "custom" , app ) ] ) for app in app_labels )
changes = autodetector . arrange_for_graph ( changes , loader . graph )
self . write_migration_files ( changes )
return
changes = autodetector . changes ( graph = loader . graph , trim_to_apps = app_labels or None , convert_apps = app_labels or None , )
if not changes and self . verbosity >= 1 :
if len ( app_labels ) == 1 :
self . stdout . write ( "No changes detected in app '%s'" % app_labels . pop ( ) )
elif len ( app_labels ) > 1 :
self . stdout . write ( "No changes detected in apps '%s'" % ( "', '" . join ( app_labels ) ) )
else :
self . stdout . write ( "No changes detected" )
return
self . write_migration_files ( changes )
def write_migration_files ( self , changes ) :
directory_created = { }
for app_label , app_migrations in changes . items ( ) :
if self . verbosity >= 1 :
self . stdout . write ( self . style . MIGRATE_HEADING ( "Migrations for '%s':" % app_label ) + "\n" )
for migration in app_migrations :
writer = MigrationWriter ( migration )
if self . verbosity >= 1 :
self . stdout . write ( " %s:\n" % ( self . style . MIGRATE_LABEL ( writer . filename ) , ) )
for operation in migration . operations :
self . stdout . write ( " - %s\n" % operation . describe ( ) )
if not self . dry_run :
migrations_directory = os . path . dirname ( writer . path )
if not directory_created . get ( app_label , False ) :
if not os . path . isdir ( migrations_directory ) :
os . mkdir ( migrations_directory )
init_path = os . path . join ( migrations_directory , "__init__.py" )
if not os . path . isfile ( init_path ) :
open ( init_path , "w" ) . close ( )
directory_created [ app_label ] = True
migration_string = writer . as_string ( )
with open ( writer . path , "wb" ) as fh :
fh . write ( migration_string )
elif self . verbosity == 3 :
self . stdout . write ( self . style . MIGRATE_HEADING ( "Full migrations file '%s':" % writer . filename ) + "\n" )
self . stdout . write ( "%s\n" % writer . as_string ( ) )
def handle_merge ( self , loader , conflicts ) :
if self . interactive :
questioner = InteractiveMigrationQuestioner ( )
else :
questioner = MigrationQuestioner ( defaults = { 'ask_merge' : True } )
for app_label , migration_names in conflicts . items ( ) :
merge_migrations = [ ]
for migration_name in migration_names :
migration = loader . get_migration ( app_label , migration_name )
migration . ancestry = loader . graph . forwards_plan ( ( app_label , migration_name ) )
merge_migrations . append ( migration )
common_ancestor = None
for level in zip ( * [ m . ancestry for m in merge_migrations ] ) :
if reduce ( operator . eq , level ) :
common_ancestor = level [ 0 ]
else :
break
if common_ancestor is None :
raise ValueError ( "Could not find common ancestor of %s" % migration_names )
for migration in merge_migrations :
migration . branch = migration . ancestry [ ( migration . ancestry . index ( common_ancestor ) + 1 ) : ]
migration . merged_operations = [ ]
for node_app , node_name in migration . branch :
migration . merged_operations . extend ( loader . get_migration ( node_app , node_name ) . operations )
if self . verbosity > 0 :
self . stdout . write ( self . style . MIGRATE_HEADING ( "Merging %s" % app_label ) )
for migration in merge_migrations :
self . stdout . write ( self . style . MIGRATE_LABEL ( " Branch %s" % migration . name ) )
for operation in migration . merged_operations :
self . stdout . write ( " - %s\n" % operation . describe ( ) )
if questioner . ask_merge ( app_label ) :
numbers = [ MigrationAutodetector . parse_number ( migration . name ) for migration in merge_migrations ]
try :
biggest_number = max ( [ x for x in numbers if x is not None ] )
except ValueError :
biggest_number = 1
subclass = type ( "Migration" , ( Migration , ) , { "dependencies" : [ ( app_label , migration . name ) for migration in merge_migrations ] , } )
new_migration = subclass ( "%04i_merge" % ( biggest_number + 1 ) , app_label )
writer = MigrationWriter ( new_migration )
with open ( writer . path , "wb" ) as fh :
fh . write ( writer . as_string ( ) )
if self . verbosity > 0 :
self . stdout . write ( "\nCreated new merge migration %s" % writer . path )
from __future__ import unicode_literals
from collections import OrderedDict
from importlib import import_module
import itertools
import traceback
from django . apps import apps
from django . core . management import call_command
from django . core . management . base import BaseCommand , CommandError
from django . core . management . color import no_style
from django . core . management . sql import custom_sql_for_model , emit_post_migrate_signal , emit_pre_migrate_signal
from django . db import connections , router , transaction , DEFAULT_DB_ALIAS
from django . db . migrations . executor import MigrationExecutor
from django . db . migrations . loader import MigrationLoader , AmbiguityError
from django . db . migrations . state import ProjectState
from django . db . migrations . autodetector import MigrationAutodetector
from django . utils . module_loading import module_has_submodule
class Command ( BaseCommand ) :
help = "Updates database schema. Manages both apps with migrations and those without."
def add_arguments ( self , parser ) :
parser . add_argument ( 'app_label' , nargs = '?' , help = 'App label of an application to synchronize the state.' )
parser . add_argument ( 'migration_name' , nargs = '?' , help = 'Database state will be brought to the state after that migration.' )
parser . add_argument ( '--noinput' , action = 'store_false' , dest = 'interactive' , default = True , help = 'Tells Django to NOT prompt the user for input of any kind.' )
parser . add_argument ( '--no-initial-data' , action = 'store_false' , dest = 'load_initial_data' , default = True , help = 'Tells Django not to load any initial data after database synchronization.' )
parser . add_argument ( '--database' , action = 'store' , dest = 'database' , default = DEFAULT_DB_ALIAS , help = 'Nominates a database to synchronize. ' 'Defaults to the "default" database.' )
parser . add_argument ( '--fake' , action = 'store_true' , dest = 'fake' , default = False , help = 'Mark migrations as run without actually running them' )
parser . add_argument ( '--list' , '-l' , action = 'store_true' , dest = 'list' , default = False , help = 'Show a list of all known migrations and which are applied' )
def handle ( self , * args , ** options ) :
self . verbosity = options . get ( 'verbosity' )
self . interactive = options . get ( 'interactive' )
self . show_traceback = options . get ( 'traceback' )
self . load_initial_data = options . get ( 'load_initial_data' )
self . test_database = options . get ( 'test_database' , False )
for app_config in apps . get_app_configs ( ) :
if module_has_submodule ( app_config . module , "management" ) :
import_module ( '.management' , app_config . name )
db = options . get ( 'database' )
connection = connections [ db ]
if options . get ( "list" , False ) :
return self . show_migration_list ( connection , [ options [ 'app_label' ] ] if options [ 'app_label' ] else None )
executor = MigrationExecutor ( connection , self . migration_progress_callback )
conflicts = executor . loader . detect_conflicts ( )
if conflicts :
name_str = "; " . join ( "%s in %s" % ( ", " . join ( names ) , app ) for app , names in conflicts . items ( ) )
raise CommandError ( "Conflicting migrations detected (%s).\nTo fix them run 'python manage.py makemigrations --merge'" % name_str )
run_syncdb = False
target_app_labels_only = True
if options [ 'app_label' ] and options [ 'migration_name' ] :
app_label , migration_name = options [ 'app_label' ] , options [ 'migration_name' ]
if app_label not in executor . loader . migrated_apps :
raise CommandError ( "App '%s' does not have migrations (you cannot selectively sync unmigrated apps)" % app_label )
if migration_name == "zero" :
targets = [ ( app_label , None ) ]
else :
try :
migration = executor . loader . get_migration_by_prefix ( app_label , migration_name )
except AmbiguityError :
raise CommandError ( "More than one migration matches '%s' in app '%s'. Please be more specific." % ( migration_name , app_label ) )
except KeyError :
raise CommandError ( "Cannot find a migration matching '%s' from app '%s'." % ( migration_name , app_label ) )
targets = [ ( app_label , migration . name ) ]
target_app_labels_only = False
elif options [ 'app_label' ] :
app_label = options [ 'app_label' ]
if app_label not in executor . loader . migrated_apps :
raise CommandError ( "App '%s' does not have migrations (you cannot selectively sync unmigrated apps)" % app_label )
targets = [ key for key in executor . loader . graph . leaf_nodes ( ) if key [ 0 ] == app_label ]
else :
targets = executor . loader . graph . leaf_nodes ( )
run_syncdb = True
plan = executor . migration_plan ( targets )
if self . verbosity >= 1 :
self . stdout . write ( self . style . MIGRATE_HEADING ( "Operations to perform:" ) )
if run_syncdb and executor . loader . unmigrated_apps :
self . stdout . write ( self . style . MIGRATE_LABEL ( " Synchronize unmigrated apps: " ) + ( ", " . join ( executor . loader . unmigrated_apps ) ) )
if target_app_labels_only :
self . stdout . write ( self . style . MIGRATE_LABEL ( " Apply all migrations: " ) + ( ", " . join ( set ( a for a , n in targets ) ) or "(none)" ) )
else :
if targets [ 0 ] [ 1 ] is None :
self . stdout . write ( self . style . MIGRATE_LABEL ( " Unapply all migrations: " ) + "%s" % ( targets [ 0 ] [ 0 ] , ) )
else :
self . stdout . write ( self . style . MIGRATE_LABEL ( " Target specific migration: " ) + "%s, from %s" % ( targets [ 0 ] [ 1 ] , targets [ 0 ] [ 0 ] ) )
if run_syncdb and executor . loader . unmigrated_apps :
if self . verbosity >= 1 :
self . stdout . write ( self . style . MIGRATE_HEADING ( "Synchronizing apps without migrations:" ) )
created_models = self . sync_apps ( connection , executor . loader . unmigrated_apps )
else :
created_models = [ ]
if options . get ( "test_flush" , False ) :
call_command ( 'flush' , verbosity = max ( self . verbosity - 1 , 0 ) , interactive = False , database = db , reset_sequences = False , inhibit_post_migrate = True , )
if self . verbosity >= 1 :
self . stdout . write ( self . style . MIGRATE_HEADING ( "Running migrations:" ) )
if not plan :
if self . verbosity >= 1 :
self . stdout . write ( " No migrations to apply." )
autodetector = MigrationAutodetector ( executor . loader . project_state ( ) , ProjectState . from_apps ( apps ) , )
changes = autodetector . changes ( graph = executor . loader . graph )
if changes :
self . stdout . write ( self . style . NOTICE ( " Your models have changes that are not yet reflected in a migration, and so won't be applied." ) )
self . stdout . write ( self . style . NOTICE ( " Run 'manage.py makemigrations' to make new migrations, and then re-run 'manage.py migrate' to apply them." ) )
else :
executor . migrate ( targets , plan , fake = options . get ( "fake" , False ) )
emit_post_migrate_signal ( created_models , self . verbosity , self . interactive , connection . alias )
def migration_progress_callback ( self , action , migration , fake = False ) :
if self . verbosity >= 1 :
if action == "apply_start" :
self . stdout . write ( " Applying %s..." % migration , ending = "" )
self . stdout . flush ( )
elif action == "apply_success" :
if fake :
self . stdout . write ( self . style . MIGRATE_SUCCESS ( " FAKED" ) )
else :
self . stdout . write ( self . style . MIGRATE_SUCCESS ( " OK" ) )
elif action == "unapply_start" :
self . stdout . write ( " Unapplying %s..." % migration , ending = "" )
self . stdout . flush ( )
elif action == "unapply_success" :
if fake :
self . stdout . write ( self . style . MIGRATE_SUCCESS ( " FAKED" ) )
else :
self . stdout . write ( self . style . MIGRATE_SUCCESS ( " OK" ) )
def sync_apps ( self , connection , app_labels ) :
cursor = connection . cursor ( )
try :
tables = connection . introspection . table_names ( cursor )
seen_models = connection . introspection . installed_models ( tables )
created_models = set ( )
pending_references = { }
all_models = [ ( app_config . label , router . get_migratable_models ( app_config , connection . alias , include_auto_created = True ) ) for app_config in apps . get_app_configs ( ) if app_config . models_module is not None and app_config . label in app_labels ]
def model_installed ( model ) :
opts = model . _meta
converter = connection . introspection . table_name_converter
return not ( ( converter ( opts . db_table ) in tables ) or ( opts . auto_created and converter ( opts . auto_created . _meta . db_table ) in tables ) )
manifest = OrderedDict ( ( app_name , list ( filter ( model_installed , model_list ) ) ) for app_name , model_list in all_models )
create_models = set ( itertools . chain ( * manifest . values ( ) ) )
emit_pre_migrate_signal ( create_models , self . verbosity , self . interactive , connection . alias )
if self . verbosity >= 1 :
self . stdout . write ( " Creating tables...\n" )
with transaction . atomic ( using = connection . alias , savepoint = connection . features . can_rollback_ddl ) :
for app_name , model_list in manifest . items ( ) :
for model in model_list :
if self . verbosity >= 3 :
self . stdout . write ( " Processing %s.%s model\n" % ( app_name , model . _meta . object_name ) )
sql , references = connection . creation . sql_create_model ( model , no_style ( ) , seen_models )
seen_models . add ( model )
created_models . add ( model )
for refto , refs in references . items ( ) :
pending_references . setdefault ( refto , [ ] ) . extend ( refs )
if refto in seen_models :
sql . extend ( connection . creation . sql_for_pending_references ( refto , no_style ( ) , pending_references ) )
sql . extend ( connection . creation . sql_for_pending_references ( model , no_style ( ) , pending_references ) )
if self . verbosity >= 1 and sql :
self . stdout . write ( " Creating table %s\n" % model . _meta . db_table )
for statement in sql :
cursor . execute ( statement )
tables . append ( connection . introspection . table_name_converter ( model . _meta . db_table ) )
finally :
cursor . close ( )
cursor = connection . cursor ( )
try :
if self . verbosity >= 1 :
self . stdout . write ( " Installing custom SQL...\n" )
for app_name , model_list in manifest . items ( ) :
for model in model_list :
if model in created_models :
custom_sql = custom_sql_for_model ( model , no_style ( ) , connection )
if custom_sql :
if self . verbosity >= 2 :
self . stdout . write ( " Installing custom SQL for %s.%s model\n" % ( app_name , model . _meta . object_name ) )
try :
with transaction . atomic ( using = connection . alias ) :
for sql in custom_sql :
cursor . execute ( sql )
except Exception as e :
self . stderr . write ( " Failed to install custom SQL for %s.%s model: %s\n" % ( app_name , model . _meta . object_name , e ) )
if self . show_traceback :
traceback . print_exc ( )
else :
if self . verbosity >= 3 :
self . stdout . write ( " No custom SQL for %s.%s model\n" % ( app_name , model . _meta . object_name ) )
if self . verbosity >= 1 :
self . stdout . write ( " Installing indexes...\n" )
for app_name , model_list in manifest . items ( ) :
for model in model_list :
if model in created_models :
index_sql = connection . creation . sql_indexes_for_model ( model , no_style ( ) )
if index_sql :
if self . verbosity >= 2 :
self . stdout . write ( " Installing index for %s.%s model\n" % ( app_name , model . _meta . object_name ) )
try :
with transaction . atomic ( using = connection . alias , savepoint = connection . features . can_rollback_ddl ) :
for sql in index_sql :
cursor . execute ( sql )
except Exception as e :
self . stderr . write ( " Failed to install index for %s.%s model: %s\n" % ( app_name , model . _meta . object_name , e ) )
finally :
cursor . close ( )
if self . load_initial_data :
for app_label in app_labels :
call_command ( 'loaddata' , 'initial_data' , verbosity = self . verbosity , database = connection . alias , skip_validation = True , app_label = app_label , hide_empty = True )
return created_models
def show_migration_list ( self , connection , app_names = None ) :
loader = MigrationLoader ( connection )
graph = loader . graph
if app_names :
invalid_apps = [ ]
for app_name in app_names :
if app_name not in loader . migrated_apps :
invalid_apps . append ( app_name )
if invalid_apps :
raise CommandError ( "No migrations present for: %s" % ( ", " . join ( invalid_apps ) ) )
else :
app_names = sorted ( loader . migrated_apps )
for app_name in app_names :
self . stdout . write ( app_name , self . style . MIGRATE_LABEL )
shown = set ( )
for node in graph . leaf_nodes ( app_name ) :
for plan_node in graph . forwards_plan ( node ) :
if plan_node not in shown and plan_node [ 0 ] == app_name :
title = plan_node [ 1 ]
if graph . nodes [ plan_node ] . replaces :
title += " (%s squashed migrations)" % len ( graph . nodes [ plan_node ] . replaces )
if plan_node in loader . applied_migrations :
self . stdout . write ( " [X] %s" % title )
else :
self . stdout . write ( " [ ] %s" % title )
shown . add ( plan_node )
if not shown :
self . stdout . write ( " (no migrations)" , self . style . MIGRATE_FAILURE )
import argparse
import warnings
from django . core . management . base import BaseCommand
from django . utils . deprecation import RemovedInDjango19Warning
class Command ( BaseCommand ) :
help = "Runs this project as a FastCGI application. Requires flup."
def add_arguments ( self , parser ) :
parser . add_argument ( 'args' , nargs = argparse . REMAINDER , help = 'Various KEY=val options.' )
def handle ( self , * args , ** options ) :
warnings . warn ( "FastCGI support has been deprecated and will be removed in Django 1.9." , RemovedInDjango19Warning )
from django . conf import settings
from django . utils import translation
try :
translation . activate ( settings . LANGUAGE_CODE )
except AttributeError :
pass
from django . core . servers . fastcgi import runfastcgi
runfastcgi ( args )
def usage ( self , subcommand ) :
from django . core . servers . fastcgi import FASTCGI_HELP
return FASTCGI_HELP
from __future__ import unicode_literals
from datetime import datetime
import errno
import os
import re
import sys
import socket
from django . core . management . base import BaseCommand , CommandError
from django . core . servers . basehttp import run , get_internal_wsgi_application
from django . db import connections , DEFAULT_DB_ALIAS
from django . db . migrations . executor import MigrationExecutor
from django . utils import autoreload
from django . utils import six
from django . core . exceptions import ImproperlyConfigured
naiveip_re = re . compile ( r"""^(?: (?P (?P\d{1,3}(?:\.\d{1,3}){3}) | # IPv4 address (?P\[[a-fA-F0-9:]+\]) | # IPv6 address (?P[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*) # FQDN ):)?(?P\d+)$""" , re . X )
DEFAULT_PORT = "8000"
class Command ( BaseCommand ) :
help = "Starts a lightweight Web server for development."
requires_system_checks = False
def add_arguments ( self , parser ) :
parser . add_argument ( 'addrport' , nargs = '?' , help = 'Optional port number, or ipaddr:port' )
parser . add_argument ( '--ipv6' , '-6' , action = 'store_true' , dest = 'use_ipv6' , default = False , help = 'Tells Django to use an IPv6 address.' )
parser . add_argument ( '--nothreading' , action = 'store_false' , dest = 'use_threading' , default = True , help = 'Tells Django to NOT use threading.' )
parser . add_argument ( '--noreload' , action = 'store_false' , dest = 'use_reloader' , default = True , help = 'Tells Django to NOT use the auto-reloader.' )
def get_handler ( self , * args , ** options ) :
return get_internal_wsgi_application ( )
def handle ( self , * args , ** options ) :
from django . conf import settings
if not settings . DEBUG and not settings . ALLOWED_HOSTS :
raise CommandError ( 'You must set settings.ALLOWED_HOSTS if DEBUG is False.' )
self . use_ipv6 = options . get ( 'use_ipv6' )
if self . use_ipv6 and not socket . has_ipv6 :
raise CommandError ( 'Your Python does not support IPv6.' )
self . _raw_ipv6 = False
if not options . get ( 'addrport' ) :
self . addr = ''
self . port = DEFAULT_PORT
else :
m = re . match ( naiveip_re , options [ 'addrport' ] )
if m is None :
raise CommandError ( '"%s" is not a valid port number ' 'or address:port pair.' % options [ 'addrport' ] )
self . addr , _ipv4 , _ipv6 , _fqdn , self . port = m . groups ( )
if not self . port . isdigit ( ) :
raise CommandError ( "%r is not a valid port number." % self . port )
if self . addr :
if _ipv6 :
self . addr = self . addr [ 1 : - 1 ]
self . use_ipv6 = True
self . _raw_ipv6 = True
elif self . use_ipv6 and not _fqdn :
raise CommandError ( '"%s" is not a valid IPv6 address.' % self . addr )
if not self . addr :
self . addr = '::1' if self . use_ipv6 else '127.0.0.1'
self . _raw_ipv6 = bool ( self . use_ipv6 )
self . run ( ** options )
def run ( self , ** options ) :
use_reloader = options . get ( 'use_reloader' )
if use_reloader :
autoreload . main ( self . inner_run , None , options )
else :
self . inner_run ( None , ** options )
def inner_run ( self , * args , ** options ) :
from django . conf import settings
from django . utils import translation
threading = options . get ( 'use_threading' )
shutdown_message = options . get ( 'shutdown_message' , '' )
quit_command = 'CTRL-BREAK' if sys . platform == 'win32' else 'CONTROL-C'
self . stdout . write ( "Performing system checks...\n\n" )
self . validate ( display_num_errors = True )
try :
self . check_migrations ( )
except ImproperlyConfigured :
pass
now = datetime . now ( ) . strftime ( '%B %d, %Y - %X' )
if six . PY2 :
now = now . decode ( 'utf-8' )
self . stdout . write ( ( "%(started_at)s\n" "Django version %(version)s, using settings %(settings)r\n" "Starting development server at http://%(addr)s:%(port)s/\n" "Quit the server with %(quit_command)s.\n" ) % { "started_at" : now , "version" : self . get_version ( ) , "settings" : settings . SETTINGS_MODULE , "addr" : '[%s]' % self . addr if self . _raw_ipv6 else self . addr , "port" : self . port , "quit_command" : quit_command , } )
translation . activate ( settings . LANGUAGE_CODE )
try :
handler = self . get_handler ( * args , ** options )
run ( self . addr , int ( self . port ) , handler , ipv6 = self . use_ipv6 , threading = threading )
except socket . error as e :
ERRORS = { errno . EACCES : "You don't have permission to access that port." , errno . EADDRINUSE : "That port is already in use." , errno . EADDRNOTAVAIL : "That IP address can't be assigned-to." , }
try :
error_text = ERRORS [ e . errno ]
except KeyError :
error_text = str ( e )
self . stderr . write ( "Error: %s" % error_text )
os . _exit ( 1 )
except KeyboardInterrupt :
if shutdown_message :
self . stdout . write ( shutdown_message )
sys . exit ( 0 )
def check_migrations ( self ) :
executor = MigrationExecutor ( connections [ DEFAULT_DB_ALIAS ] )
plan = executor . migration_plan ( executor . loader . graph . leaf_nodes ( ) )
if plan :
self . stdout . write ( self . style . NOTICE ( "\nYou have unapplied migrations; your app may not work properly until they are applied." ) )
self . stdout . write ( self . style . NOTICE ( "Run 'python manage.py migrate' to apply them.\n" ) )
BaseRunserverCommand = Command
import os
from django . core . management . base import BaseCommand
class Command ( BaseCommand ) :
help = "Runs a Python interactive interpreter. Tries to use IPython or bpython, if one of them is available."
requires_system_checks = False
shells = [ 'ipython' , 'bpython' ]
def add_arguments ( self , parser ) :
parser . add_argument ( '--plain' , action = 'store_true' , dest = 'plain' , help = 'Tells Django to use plain Python, not IPython or bpython.' )
parser . add_argument ( '--no-startup' , action = 'store_true' , dest = 'no_startup' , help = 'When using plain Python, ignore the PYTHONSTARTUP environment variable and ~/.pythonrc.py script.' )
parser . add_argument ( '-i' , '--interface' , choices = self . shells , dest = 'interface' , help = 'Specify an interactive interpreter interface. Available options: "ipython" and "bpython"' )
def _ipython_pre_011 ( self ) :
from IPython . Shell import IPShell
shell = IPShell ( argv = [ ] )
shell . mainloop ( )
def _ipython_pre_100 ( self ) :
from IPython . frontend . terminal . ipapp import TerminalIPythonApp
app = TerminalIPythonApp . instance ( )
app . initialize ( argv = [ ] )
app . start ( )
def _ipython ( self ) :
from IPython import start_ipython
start_ipython ( argv = [ ] )
def ipython ( self ) :
for ip in ( self . _ipython , self . _ipython_pre_100 , self . _ipython_pre_011 ) :
try :
ip ( )
except ImportError :
pass
else :
return
raise ImportError ( "No IPython" )
def bpython ( self ) :
import bpython
bpython . embed ( )
def run_shell ( self , shell = None ) :
available_shells = [ shell ] if shell else self . shells
for shell in available_shells :
try :
return getattr ( self , shell ) ( )
except ImportError :
pass
raise ImportError
def handle ( self , ** options ) :
try :
if options [ 'plain' ] :
raise ImportError
self . run_shell ( shell = options [ 'interface' ] )
except ImportError :
import code
imported_objects = { }
try :
import readline
except ImportError :
pass
else :
import rlcompleter
readline . set_completer ( rlcompleter . Completer ( imported_objects ) . complete )
readline . parse_and_bind ( "tab:complete" )
if not options [ 'no_startup' ] :
for pythonrc in ( os . environ . get ( "PYTHONSTARTUP" ) , '~/.pythonrc.py' ) :
if not pythonrc :
continue
pythonrc = os . path . expanduser ( pythonrc )
if not os . path . isfile ( pythonrc ) :
continue
try :
with open ( pythonrc ) as handle :
exec ( compile ( handle . read ( ) , pythonrc , 'exec' ) , imported_objects )
except NameError :
pass
code . interact ( local = imported_objects )
from __future__ import unicode_literals
from django . core . management . base import AppCommand
from django . core . management . sql import sql_create
from django . db import connections , DEFAULT_DB_ALIAS
class Command ( AppCommand ) :
help = "Prints the CREATE TABLE SQL statements for the given app name(s)."
output_transaction = True
def add_arguments ( self , parser ) :
super ( Command , self ) . add_arguments ( parser )
parser . add_argument ( '--database' , default = DEFAULT_DB_ALIAS , help = 'Nominates a database to print the SQL for. Defaults to the ' '"default" database.' )
def handle_app_config ( self , app_config , ** options ) :
if app_config . models_module is None :
return
connection = connections [ options [ 'database' ] ]
statements = sql_create ( app_config , self . style , connection )
return '\n' . join ( statements )
from __future__ import unicode_literals
from django . core . management . base import AppCommand
from django . core . management . sql import sql_all
from django . db import connections , DEFAULT_DB_ALIAS
class Command ( AppCommand ) :
help = "Prints the CREATE TABLE, custom SQL and CREATE INDEX SQL statements for the given model module name(s)."
output_transaction = True
def add_arguments ( self , parser ) :
super ( Command , self ) . add_arguments ( parser )
parser . add_argument ( '--database' , default = DEFAULT_DB_ALIAS , help = 'Nominates a database to print the SQL for. Defaults to the ' '"default" database.' )
def handle_app_config ( self , app_config , ** options ) :
if app_config . models_module is None :
return
connection = connections [ options [ 'database' ] ]
statements = sql_all ( app_config , self . style , connection )
return '\n' . join ( statements )
from __future__ import unicode_literals
from django . core . management . base import AppCommand
from django . core . management . sql import sql_delete
from django . db import connections , DEFAULT_DB_ALIAS
class Command ( AppCommand ) :
help = "Prints the DROP TABLE SQL statements for the given app name(s)."
output_transaction = True
def add_arguments ( self , parser ) :
super ( Command , self ) . add_arguments ( parser )
parser . add_argument ( '--database' , default = DEFAULT_DB_ALIAS , help = 'Nominates a database to print the SQL for. Defaults to the ' '"default" database.' )
def handle_app_config ( self , app_config , ** options ) :
if app_config . models_module is None :
return
connection = connections [ options [ 'database' ] ]
statements = sql_delete ( app_config , self . style , connection )
return '\n' . join ( statements )
from __future__ import unicode_literals
from django . core . management . base import AppCommand
from django . core . management . sql import sql_custom
from django . db import connections , DEFAULT_DB_ALIAS
class Command ( AppCommand ) :
help = "Prints the custom table modifying SQL statements for the given app name(s)."
output_transaction = True
def add_arguments ( self , parser ) :
super ( Command , self ) . add_arguments ( parser )
parser . add_argument ( '--database' , default = DEFAULT_DB_ALIAS , help = 'Nominates a database to print the SQL for. Defaults to the ' '"default" database.' )
def handle_app_config ( self , app_config , ** options ) :
if app_config . models_module is None :
return
connection = connections [ options [ 'database' ] ]
statements = sql_custom ( app_config , self . style , connection )
return '\n' . join ( statements )
from __future__ import unicode_literals
from django . core . management . base import AppCommand
from django . core . management . sql import sql_destroy_indexes
from django . db import connections , DEFAULT_DB_ALIAS
class Command ( AppCommand ) :
help = "Prints the DROP INDEX SQL statements for the given model module name(s)."
output_transaction = True
def add_arguments ( self , parser ) :
super ( Command , self ) . add_arguments ( parser )
parser . add_argument ( '--database' , default = DEFAULT_DB_ALIAS , help = 'Nominates a database to print the SQL for. Defaults to the ' '"default" database.' )
def handle_app_config ( self , app_config , ** options ) :
if app_config . models_module is None :
return
connection = connections [ options [ 'database' ] ]
statements = sql_destroy_indexes ( app_config , self . style , connection )
return '\n' . join ( statements )
from __future__ import unicode_literals
from django . core . management . base import BaseCommand
from django . core . management . sql import sql_flush
from django . db import connections , DEFAULT_DB_ALIAS
class Command ( BaseCommand ) :
help = "Returns a list of the SQL statements required to return all tables in the database to the state they were in just after they were installed."
output_transaction = True
def add_arguments ( self , parser ) :
super ( Command , self ) . add_arguments ( parser )
parser . add_argument ( '--database' , default = DEFAULT_DB_ALIAS , help = 'Nominates a database to print the SQL for. Defaults to the ' '"default" database.' )
def handle ( self , ** options ) :
return '\n' . join ( sql_flush ( self . style , connections [ options [ 'database' ] ] , only_django = True ) )
from __future__ import unicode_literals
from django . core . management . base import AppCommand
from django . core . management . sql import sql_indexes
from django . db import connections , DEFAULT_DB_ALIAS
class Command ( AppCommand ) :
help = "Prints the CREATE INDEX SQL statements for the given model module name(s)."
output_transaction = True
def add_arguments ( self , parser ) :
super ( Command , self ) . add_arguments ( parser )
parser . add_argument ( '--database' , default = DEFAULT_DB_ALIAS , help = 'Nominates a database to print the SQL for. Defaults to the ' '"default" database.' )
def handle_app_config ( self , app_config , ** options ) :
if app_config . models_module is None :
return
connection = connections [ options [ 'database' ] ]
statements = sql_indexes ( app_config , self . style , connection )
return '\n' . join ( statements )
from __future__ import unicode_literals
from django . core . management . base import BaseCommand , CommandError
from django . db import connections , DEFAULT_DB_ALIAS
from django . db . migrations . executor import MigrationExecutor
from django . db . migrations . loader import AmbiguityError
class Command ( BaseCommand ) :
help = "Prints the SQL statements for the named migration."
def add_arguments ( self , parser ) :
parser . add_argument ( 'app_label' , help = 'App label of the application containing the migration.' )
parser . add_argument ( 'migration_name' , help = 'Migration name to print the SQL for.' )
parser . add_argument ( '--database' , default = DEFAULT_DB_ALIAS , help = 'Nominates a database to create SQL for. Defaults to the ' '"default" database.' )
parser . add_argument ( '--backwards' , action = 'store_true' , dest = 'backwards' , default = False , help = 'Creates SQL to unapply the migration, rather than to apply it' )
def handle ( self , * args , ** options ) :
connection = connections [ options [ 'database' ] ]
executor = MigrationExecutor ( connection )
app_label , migration_name = options [ 'app_label' ] , options [ 'migration_name' ]
if app_label not in executor . loader . migrated_apps :
raise CommandError ( "App '%s' does not have migrations" % app_label )
try :
migration = executor . loader . get_migration_by_prefix ( app_label , migration_name )
except AmbiguityError :
raise CommandError ( "More than one migration matches '%s' in app '%s'. Please be more specific." % ( migration_name , app_label ) )
except KeyError :
raise CommandError ( "Cannot find a migration matching '%s' from app '%s'. Is it in INSTALLED_APPS?" % ( migration_name , app_label ) )
targets = [ ( app_label , migration . name ) ]
plan = [ ( executor . loader . graph . nodes [ targets [ 0 ] ] , options [ 'backwards' ] ) ]
sql_statements = executor . collect_sql ( plan )
for statement in sql_statements :
self . stdout . write ( statement )
from __future__ import unicode_literals
from django . core . management . base import AppCommand
from django . core . management . sql import check_for_migrations
from django . db import connections , DEFAULT_DB_ALIAS
class Command ( AppCommand ) :
help = 'Prints the SQL statements for resetting sequences for the given app name(s).'
output_transaction = True
def add_arguments ( self , parser ) :
super ( Command , self ) . add_arguments ( parser )
parser . add_argument ( '--database' , default = DEFAULT_DB_ALIAS , help = 'Nominates a database to print the SQL for. Defaults to the ' '"default" database.' )
def handle_app_config ( self , app_config , ** options ) :
if app_config . models_module is None :
return
connection = connections [ options . get ( 'database' ) ]
check_for_migrations ( app_config , connection )
models = app_config . get_models ( include_auto_created = True )
statements = connection . ops . sequence_reset_sql ( self . style , models )
return '\n' . join ( statements )
from django . core . management . base import BaseCommand , CommandError
from django . utils import six
from django . db import connections , DEFAULT_DB_ALIAS , migrations
from django . db . migrations . loader import AmbiguityError
from django . db . migrations . executor import MigrationExecutor
from django . db . migrations . writer import MigrationWriter
from django . db . migrations . optimizer import MigrationOptimizer
class Command ( BaseCommand ) :
help = "Squashes an existing set of migrations (from first until specified) into a single new one."
def add_arguments ( self , parser ) :
parser . add_argument ( 'app_label' , help = 'App label of the application to squash migrations for.' )
parser . add_argument ( 'migration_name' , help = 'Migrations will be squashed until and including this migration.' )
parser . add_argument ( '--no-optimize' , action = 'store_true' , dest = 'no_optimize' , default = False , help = 'Do not try to optimize the squashed operations.' )
parser . add_argument ( '--noinput' , action = 'store_false' , dest = 'interactive' , default = True , help = 'Tells Django to NOT prompt the user for input of any kind.' )
def handle ( self , ** options ) :
self . verbosity = options . get ( 'verbosity' )
self . interactive = options . get ( 'interactive' )
app_label , migration_name = options [ 'app_label' ] , options [ 'migration_name' ]
executor = MigrationExecutor ( connections [ DEFAULT_DB_ALIAS ] )
if app_label not in executor . loader . migrated_apps :
raise CommandError ( "App '%s' does not have migrations (so squashmigrations on it makes no sense)" % app_label )
try :
migration = executor . loader . get_migration_by_prefix ( app_label , migration_name )
except AmbiguityError :
raise CommandError ( "More than one migration matches '%s' in app '%s'. Please be more specific." % ( migration_name , app_label ) )
except KeyError :
raise CommandError ( "Cannot find a migration matching '%s' from app '%s'." % ( migration_name , app_label ) )
migrations_to_squash = [ executor . loader . get_migration ( al , mn ) for al , mn in executor . loader . graph . forwards_plan ( ( migration . app_label , migration . name ) ) if al == migration . app_label ]
if self . verbosity > 0 or self . interactive :
self . stdout . write ( self . style . MIGRATE_HEADING ( "Will squash the following migrations:" ) )
for migration in migrations_to_squash :
self . stdout . write ( " - %s" % migration . name )
if self . interactive :
answer = None
while not answer or answer not in "yn" :
answer = six . moves . input ( "Do you wish to proceed? [yN] " )
if not answer :
answer = "n"
break
else :
answer = answer [ 0 ] . lower ( )
if answer != "y" :
return
operations = [ ]
for smigration in migrations_to_squash :
operations . extend ( smigration . operations )
if self . verbosity > 0 :
self . stdout . write ( self . style . MIGRATE_HEADING ( "Optimizing..." ) )
optimizer = MigrationOptimizer ( )
new_operations = optimizer . optimize ( operations , migration . app_label )
if self . verbosity > 0 :
if len ( new_operations ) == len ( operations ) :
self . stdout . write ( " No optimizations possible." )
else :
self . stdout . write ( " Optimized from %s operations to %s operations." % ( len ( operations ) , len ( new_operations ) ) )
replaces = [ ]
for migration in migrations_to_squash :
if migration . replaces :
replaces . extend ( migration . replaces )
else :
replaces . append ( ( migration . app_label , migration . name ) )
subclass = type ( "Migration" , ( migrations . Migration , ) , { "dependencies" : [ ] , "operations" : new_operations , "replaces" : replaces , } )
new_migration = subclass ( "0001_squashed_%s" % migration . name , app_label )
writer = MigrationWriter ( new_migration )
with open ( writer . path , "wb" ) as fh :
fh . write ( writer . as_string ( ) )
if self . verbosity > 0 :
self . stdout . write ( self . style . MIGRATE_HEADING ( "Created new squashed migration %s" % writer . path ) )
self . stdout . write ( " You should commit this migration but leave the old ones in place;" )
self . stdout . write ( " the new migration will be used for new installs. Once you are sure" )
self . stdout . write ( " all instances of the codebase have applied the migrations you squashed," )
self . stdout . write ( " you can delete them." )
from importlib import import_module
from django . core . management . base import CommandError
from django . core . management . templates import TemplateCommand
class Command ( TemplateCommand ) :
help = ( "Creates a Django app directory structure for the given app " "name in the current directory or optionally in the given " "directory." )
missing_args_message = "You must provide an application name."
def handle ( self , ** options ) :
app_name , target = options . pop ( 'name' ) , options . pop ( 'directory' )
self . validate_name ( app_name , "app" )
try :
import_module ( app_name )
except ImportError :
pass
else :
raise CommandError ( "%r conflicts with the name of an existing " "Python module and cannot be used as an app " "name. Please try another name." % app_name )
super ( Command , self ) . handle ( 'app' , app_name , target , ** options )
from importlib import import_module
from django . core . management . base import CommandError
from django . core . management . templates import TemplateCommand
from django . utils . crypto import get_random_string
class Command ( TemplateCommand ) :
help = ( "Creates a Django project directory structure for the given " "project name in the current directory or optionally in the " "given directory." )
missing_args_message = "You must provide a project name."
def handle ( self , ** options ) :
project_name , target = options . pop ( 'name' ) , options . pop ( 'directory' )
self . validate_name ( project_name , "project" )
try :
import_module ( project_name )
except ImportError :
pass
else :
raise CommandError ( "%r conflicts with the name of an existing " "Python module and cannot be used as a " "project name. Please try another name." % project_name )
chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
options [ 'secret_key' ] = get_random_string ( 50 , chars )
super ( Command , self ) . handle ( 'project' , project_name , target , ** options )
import warnings
from django . apps import apps
from django . contrib . auth import get_user_model
from django . db import DEFAULT_DB_ALIAS
from django . core . management import call_command
from django . core . management . base import BaseCommand
from django . utils . deprecation import RemovedInDjango19Warning
from django . utils . six . moves import input
class Command ( BaseCommand ) :
help = "Deprecated - use 'migrate' instead."
def add_arguments ( self , parser ) :
parser . add_argument ( '--noinput' , action = 'store_false' , dest = 'interactive' , default = True , help = 'Tells Django to NOT prompt the user for input of any kind.' )
parser . add_argument ( '--no-initial-data' , action = 'store_false' , dest = 'load_initial_data' , default = True , help = 'Tells Django not to load any initial data after database synchronization.' )
parser . add_argument ( '--database' , default = DEFAULT_DB_ALIAS , help = 'Nominates a database to synchronize. Defaults to the "default" database.' )
def handle ( self , ** options ) :
warnings . warn ( "The syncdb command will be removed in Django 1.9" , RemovedInDjango19Warning )
call_command ( "migrate" , ** options )
try :
apps . get_model ( 'auth' , 'Permission' )
except LookupError :
return
UserModel = get_user_model ( )
if not UserModel . _default_manager . exists ( ) and options . get ( 'interactive' ) :
msg = ( "\nYou have installed Django's auth system, and " "don't have any superusers defined.\nWould you like to create one " "now? (yes/no): " )
confirm = input ( msg )
while 1 :
if confirm not in ( 'yes' , 'no' ) :
confirm = input ( 'Please enter either "yes" or "no": ' )
continue
if confirm == 'yes' :
call_command ( "createsuperuser" , interactive = True , database = options [ 'database' ] )
break
import logging
import sys
import os
from django . conf import settings
from django . core . management . base import BaseCommand
from django . test . utils import get_runner
class Command ( BaseCommand ) :
help = 'Discover and run tests in the specified modules or the current directory.'
requires_system_checks = False
def __init__ ( self ) :
self . test_runner = None
super ( Command , self ) . __init__ ( )
def run_from_argv ( self , argv ) :
option = '--testrunner='
for arg in argv [ 2 : ] :
if arg . startswith ( option ) :
self . test_runner = arg [ len ( option ) : ]
break
super ( Command , self ) . run_from_argv ( argv )
def add_arguments ( self , parser ) :
parser . add_argument ( 'args' , metavar = 'test_label' , nargs = '*' , help = 'Module paths to test; can be modulename, modulename.TestCase or modulename.TestCase.test_method' )
parser . add_argument ( '--noinput' , action = 'store_false' , dest = 'interactive' , default = True , help = 'Tells Django to NOT prompt the user for input of any kind.' ) ,
parser . add_argument ( '--failfast' , action = 'store_true' , dest = 'failfast' , default = False , help = 'Tells Django to stop running the test suite after first ' 'failed test.' ) ,
parser . add_argument ( '--testrunner' , action = 'store' , dest = 'testrunner' , help = 'Tells Django to use specified test runner class instead of ' 'the one specified by the TEST_RUNNER setting.' ) ,
parser . add_argument ( '--liveserver' , action = 'store' , dest = 'liveserver' , default = None , help = 'Overrides the default address where the live server (used ' 'with LiveServerTestCase) is expected to run from. The ' 'default value is localhost:8081.' ) ,
test_runner_class = get_runner ( settings , self . test_runner )
if hasattr ( test_runner_class , 'option_list' ) :
raise RuntimeError ( "The method to extend accepted command-line arguments by the " "test management command has changed in Django 1.8. Please " "create an add_arguments class method to achieve this." )
if hasattr ( test_runner_class , 'add_arguments' ) :
test_runner_class . add_arguments ( parser )
def execute ( self , * args , ** options ) :
if options [ 'verbosity' ] > 0 :
logger = logging . getLogger ( 'py.warnings' )
handler = logging . StreamHandler ( )
logger . addHandler ( handler )
super ( Command , self ) . execute ( * args , ** options )
if options [ 'verbosity' ] > 0 :
logger . removeHandler ( handler )
def handle ( self , * test_labels , ** options ) :
from django . conf import settings
from django . test . utils import get_runner
TestRunner = get_runner ( settings , options . get ( 'testrunner' ) )
if options . get ( 'liveserver' ) is not None :
os . environ [ 'DJANGO_LIVE_TEST_SERVER_ADDRESS' ] = options [ 'liveserver' ]
del options [ 'liveserver' ]
test_runner = TestRunner ( ** options )
failures = test_runner . run_tests ( test_labels )
if failures :
sys . exit ( bool ( failures ) )
from django . core . management import call_command
from django . core . management . base import BaseCommand
from django . db import connection
class Command ( BaseCommand ) :
help = 'Runs a development server with data from the given fixture(s).'
args = '[fixture ...]'
requires_system_checks = False
def add_arguments ( self , parser ) :
parser . add_argument ( 'args' , metavar = 'fixture' , nargs = '*' , help = 'Path(s) to fixtures to load before running the server.' )
parser . add_argument ( '--noinput' , action = 'store_false' , dest = 'interactive' , default = True , help = 'Tells Django to NOT prompt the user for input of any kind.' )
parser . add_argument ( '--addrport' , default = '' , help = 'Port number or ipaddr:port to run the server on.' )
parser . add_argument ( '--ipv6' , '-6' , action = 'store_true' , dest = 'use_ipv6' , default = False , help = 'Tells Django to use an IPv6 address.' )
def handle ( self , * fixture_labels , ** options ) :
verbosity = options . get ( 'verbosity' )
interactive = options . get ( 'interactive' )
db_name = connection . creation . create_test_db ( verbosity = verbosity , autoclobber = not interactive , serialize = False )
call_command ( 'loaddata' , * fixture_labels , ** { 'verbosity' : verbosity } )
shutdown_message = '\nServer stopped.\nNote that the test database, %r, has not been deleted. You can explore it on your own.' % db_name
use_threading = connection . features . test_db_allows_multiple_connections
call_command ( 'runserver' , addrport = options [ 'addrport' ] , shutdown_message = shutdown_message , use_reloader = False , use_ipv6 = options [ 'use_ipv6' ] , use_threading = use_threading )
from __future__ import unicode_literals
import warnings
from django . core . management . commands . check import Command as CheckCommand
from django . utils . deprecation import RemovedInDjango19Warning
class Command ( CheckCommand ) :
help = 'Deprecated. Use "check" command instead. ' + CheckCommand . help
def handle ( self , ** options ) :
warnings . warn ( '"validate" has been deprecated in favor of "check".' , RemovedInDjango19Warning )
super ( Command , self ) . handle ( ** options )
from __future__ import unicode_literals
import codecs
import os
import re
import warnings
from django . apps import apps
from django . conf import settings
from django . core . management . base import CommandError
from django . db import models , router
from django . utils . deprecation import RemovedInDjango19Warning
def check_for_migrations ( app_config , connection ) :
from django . db . migrations . loader import MigrationLoader
loader = MigrationLoader ( connection )
if app_config . label in loader . migrated_apps :
raise CommandError ( "App '%s' has migrations. Only the sqlmigrate and sqlflush commands can be used when an app has migrations." % app_config . label )
def sql_create ( app_config , style , connection ) :
check_for_migrations ( app_config , connection )
if connection . settings_dict [ 'ENGINE' ] == 'django.db.backends.dummy' :
raise CommandError ( "Django doesn't know which syntax to use for your SQL statements,\n" + "because you haven't properly specified the ENGINE setting for the database.\n" + "see: https://docs.djangoproject.com/en/dev/ref/settings/#databases" )
app_models = app_config . get_models ( include_auto_created = True )
final_output = [ ]
tables = connection . introspection . table_names ( )
known_models = set ( model for model in connection . introspection . installed_models ( tables ) if model not in app_models )
pending_references = { }
for model in router . get_migratable_models ( app_config , connection . alias , include_auto_created = True ) :
output , references = connection . creation . sql_create_model ( model , style , known_models )
final_output . extend ( output )
for refto , refs in references . items ( ) :
pending_references . setdefault ( refto , [ ] ) . extend ( refs )
if refto in known_models :
final_output . extend ( connection . creation . sql_for_pending_references ( refto , style , pending_references ) )
final_output . extend ( connection . creation . sql_for_pending_references ( model , style , pending_references ) )
known_models . add ( model )
not_installed_models = set ( pending_references . keys ( ) )
if not_installed_models :
alter_sql = [ ]
for model in not_installed_models :
alter_sql . extend ( [ '-- ' + sql for sql in connection . creation . sql_for_pending_references ( model , style , pending_references ) ] )
if alter_sql :
final_output . append ( '-- The following references should be added but depend on non-existent tables:' )
final_output . extend ( alter_sql )
return final_output
def sql_delete ( app_config , style , connection , close_connection = True ) :
check_for_migrations ( app_config , connection )
try :
cursor = connection . cursor ( )
except Exception :
cursor = None
try :
if cursor :
table_names = connection . introspection . table_names ( cursor )
else :
table_names = [ ]
output = [ ]
to_delete = set ( )
references_to_delete = { }
app_models = router . get_migratable_models ( app_config , connection . alias , include_auto_created = True )
for model in app_models :
if cursor and connection . introspection . table_name_converter ( model . _meta . db_table ) in table_names :
opts = model . _meta
for f in opts . local_fields :
if f . rel and f . rel . to not in to_delete :
references_to_delete . setdefault ( f . rel . to , [ ] ) . append ( ( model , f ) )
to_delete . add ( model )
for model in app_models :
if connection . introspection . table_name_converter ( model . _meta . db_table ) in table_names :
output . extend ( connection . creation . sql_destroy_model ( model , references_to_delete , style ) )
finally :
if cursor and close_connection :
cursor . close ( )
connection . close ( )
return output [ : : - 1 ]
def sql_flush ( style , connection , only_django = False , reset_sequences = True , allow_cascade = False ) :
if only_django :
tables = connection . introspection . django_table_names ( only_existing = True )
else :
tables = connection . introspection . table_names ( )
seqs = connection . introspection . sequence_list ( ) if reset_sequences else ( )
statements = connection . ops . sql_flush ( style , tables , seqs , allow_cascade )
return statements
def sql_custom ( app_config , style , connection ) :
check_for_migrations ( app_config , connection )
output = [ ]
app_models = router . get_migratable_models ( app_config , connection . alias )
for model in app_models :
output . extend ( custom_sql_for_model ( model , style , connection ) )
return output
def sql_indexes ( app_config , style , connection ) :
check_for_migrations ( app_config , connection )
output = [ ]
for model in router . get_migratable_models ( app_config , connection . alias , include_auto_created = True ) :
output . extend ( connection . creation . sql_indexes_for_model ( model , style ) )
return output
def sql_destroy_indexes ( app_config , style , connection ) :
check_for_migrations ( app_config , connection )
output = [ ]
for model in router . get_migratable_models ( app_config , connection . alias , include_auto_created = True ) :
output . extend ( connection . creation . sql_destroy_indexes_for_model ( model , style ) )
return output
def sql_all ( app_config , style , connection ) :
check_for_migrations ( app_config , connection )
return sql_create ( app_config , style , connection ) + sql_custom ( app_config , style , connection ) + sql_indexes ( app_config , style , connection )
def _split_statements ( content ) :
comment_re = re . compile ( r"^((?:'[^']*'|[^'])*?)--.*$" )
statements = [ ]
statement = [ ]
for line in content . split ( "\n" ) :
cleaned_line = comment_re . sub ( r"\1" , line ) . strip ( )
if not cleaned_line :
continue
statement . append ( cleaned_line )
if cleaned_line . endswith ( ";" ) :
statements . append ( " " . join ( statement ) )
statement = [ ]
return statements
def custom_sql_for_model ( model , style , connection ) :
opts = model . _meta
app_dirs = [ ]
app_dir = apps . get_app_config ( model . _meta . app_label ) . path
app_dirs . append ( os . path . normpath ( os . path . join ( app_dir , 'sql' ) ) )
old_app_dir = os . path . normpath ( os . path . join ( app_dir , 'models/sql' ) )
if os . path . exists ( old_app_dir ) :
warnings . warn ( "Custom SQL location '/models/sql' is " "deprecated, use '/sql' instead." , RemovedInDjango19Warning )
app_dirs . append ( old_app_dir )
output = [ ]
if opts . managed :
post_sql_fields = [ f for f in opts . local_fields if hasattr ( f , 'post_create_sql' ) ]
for f in post_sql_fields :
output . extend ( f . post_create_sql ( style , model . _meta . db_table ) )
backend_name = connection . settings_dict [ 'ENGINE' ] . split ( '.' ) [ - 1 ]
sql_files = [ ]
for app_dir in app_dirs :
sql_files . append ( os . path . join ( app_dir , "%s.%s.sql" % ( opts . model_name , backend_name ) ) )
sql_files . append ( os . path . join ( app_dir , "%s.sql" % opts . model_name ) )
for sql_file in sql_files :
if os . path . exists ( sql_file ) :
with codecs . open ( sql_file , 'r' , encoding = settings . FILE_CHARSET ) as fp :
output . extend ( connection . ops . prepare_sql_script ( fp . read ( ) , _allow_fallback = True ) )
return output
def emit_pre_migrate_signal ( create_models , verbosity , interactive , db ) :
for app_config in apps . get_app_configs ( ) :
if app_config . models_module is None :
continue
if verbosity >= 2 :
print ( "Running pre-migrate handlers for application %s" % app_config . label )
models . signals . pre_migrate . send ( sender = app_config , app_config = app_config , verbosity = verbosity , interactive = interactive , using = db )
models . signals . pre_syncdb . send ( sender = app_config . models_module , app = app_config . models_module , create_models = create_models , verbosity = verbosity , interactive = interactive , db = db )
def emit_post_migrate_signal ( created_models , verbosity , interactive , db ) :
for app_config in apps . get_app_configs ( ) :
if app_config . models_module is None :
continue
if verbosity >= 2 :
print ( "Running post-migrate handlers for application %s" % app_config . label )
models . signals . post_migrate . send ( sender = app_config , app_config = app_config , verbosity = verbosity , interactive = interactive , using = db )
models . signals . post_syncdb . send ( sender = app_config . models_module , app = app_config . models_module , created_models = created_models , verbosity = verbosity , interactive = interactive , db = db )
import cgi
import errno
import mimetypes
import os
import posixpath
import re
import shutil
import stat
import sys
import tempfile
from os import path
import django
from django . template import Template , Context
from django . utils import archive
from django . utils . six . moves . urllib . request import urlretrieve
from django . utils . _os import rmtree_errorhandler
from django . core . management . base import BaseCommand , CommandError
from django . core . management . utils import handle_extensions
_drive_re = re . compile ( '^([a-z]):' , re . I )
_url_drive_re = re . compile ( '^([a-z])[:|]' , re . I )
class TemplateCommand ( BaseCommand ) :
requires_system_checks = False
can_import_settings = False
url_schemes = [ 'http' , 'https' , 'ftp' ]
leave_locale_alone = True
def add_arguments ( self , parser ) :
parser . add_argument ( 'name' , help = 'Name of the application or project.' )
parser . add_argument ( 'directory' , nargs = '?' , help = 'Optional destination directory' )
parser . add_argument ( '--template' , help = 'The path or URL to load the template from.' )
parser . add_argument ( '--extension' , '-e' , dest = 'extensions' , action = 'append' , default = [ 'py' ] , help = 'The file extension(s) to render (default: "py"). ' 'Separate multiple extensions with commas, or use ' '-e multiple times.' )
parser . add_argument ( '--name' , '-n' , dest = 'files' , action = 'append' , default = [ ] , help = 'The file name(s) to render. ' 'Separate multiple extensions with commas, or use ' '-n multiple times.' )
def handle ( self , app_or_project , name , target = None , ** options ) :
self . app_or_project = app_or_project
self . paths_to_remove = [ ]
self . verbosity = options [ 'verbosity' ]
self . validate_name ( name , app_or_project )
if target is None :
top_dir = path . join ( os . getcwd ( ) , name )
try :
os . makedirs ( top_dir )
except OSError as e :
if e . errno == errno . EEXIST :
message = "'%s' already exists" % top_dir
else :
message = e
raise CommandError ( message )
else :
top_dir = os . path . abspath ( path . expanduser ( target ) )
if not os . path . exists ( top_dir ) :
raise CommandError ( "Destination directory '%s' does not " "exist, please create it first." % top_dir )
extensions = tuple ( handle_extensions ( options [ 'extensions' ] , ignored = ( ) ) )
extra_files = [ ]
for file in options [ 'files' ] :
extra_files . extend ( map ( lambda x : x . strip ( ) , file . split ( ',' ) ) )
if self . verbosity >= 2 :
self . stdout . write ( "Rendering %s template files with " "extensions: %s\n" % ( app_or_project , ', ' . join ( extensions ) ) )
self . stdout . write ( "Rendering %s template files with " "filenames: %s\n" % ( app_or_project , ', ' . join ( extra_files ) ) )
base_name = '%s_name' % app_or_project
base_subdir = '%s_template' % app_or_project
base_directory = '%s_directory' % app_or_project
if django . VERSION [ - 2 ] != 'final' :
docs_version = 'dev'
else :
docs_version = '%d.%d' % django . VERSION [ : 2 ]
context = Context ( dict ( options , ** { base_name : name , base_directory : top_dir , 'docs_version' : docs_version , } ) , autoescape = False )
from django . conf import settings
if not settings . configured :
settings . configure ( )
template_dir = self . handle_template ( options [ 'template' ] , base_subdir )
prefix_length = len ( template_dir ) + 1
for root , dirs , files in os . walk ( template_dir ) :
path_rest = root [ prefix_length : ]
relative_dir = path_rest . replace ( base_name , name )
if relative_dir :
target_dir = path . join ( top_dir , relative_dir )
if not path . exists ( target_dir ) :
os . mkdir ( target_dir )
for dirname in dirs [ : ] :
if dirname . startswith ( '.' ) or dirname == '__pycache__' :
dirs . remove ( dirname )
for filename in files :
if filename . endswith ( ( '.pyo' , '.pyc' , '.py.class' ) ) :
continue
old_path = path . join ( root , filename )
new_path = path . join ( top_dir , relative_dir , filename . replace ( base_name , name ) )
if path . exists ( new_path ) :
raise CommandError ( "%s already exists, overlaying a " "project or app into an existing " "directory won't replace conflicting " "files" % new_path )
with open ( old_path , 'rb' ) as template_file :
content = template_file . read ( )
if filename . endswith ( extensions ) or filename in extra_files :
content = content . decode ( 'utf-8' )
template = Template ( content )
content = template . render ( context )
content = content . encode ( 'utf-8' )
with open ( new_path , 'wb' ) as new_file :
new_file . write ( content )
if self . verbosity >= 2 :
self . stdout . write ( "Creating %s\n" % new_path )
try :
shutil . copymode ( old_path , new_path )
self . make_writeable ( new_path )
except OSError :
self . stderr . write ( "Notice: Couldn't set permission bits on %s. You're " "probably using an uncommon filesystem setup. No " "problem." % new_path , self . style . NOTICE )
if self . paths_to_remove :
if self . verbosity >= 2 :
self . stdout . write ( "Cleaning up temporary files.\n" )
for path_to_remove in self . paths_to_remove :
if path . isfile ( path_to_remove ) :
os . remove ( path_to_remove )
else :
shutil . rmtree ( path_to_remove , onerror = rmtree_errorhandler )
def handle_template ( self , template , subdir ) :
if template is None :
return path . join ( django . __path__ [ 0 ] , 'conf' , subdir )
else :
if template . startswith ( 'file://' ) :
template = template [ 7 : ]
expanded_template = path . expanduser ( template )
expanded_template = path . normpath ( expanded_template )
if path . isdir ( expanded_template ) :
return expanded_template
if self . is_url ( template ) :
absolute_path = self . download ( template )
else :
absolute_path = path . abspath ( expanded_template )
if path . exists ( absolute_path ) :
return self . extract ( absolute_path )
raise CommandError ( "couldn't handle %s template %s." % ( self . app_or_project , template ) )
def validate_name ( self , name , app_or_project ) :
if name is None :
raise CommandError ( "you must provide %s %s name" % ( "an" if app_or_project == "app" else "a" , app_or_project ) )
if not re . search ( r'^[_a-zA-Z]\w*$' , name ) :
if not re . search ( r'^[_a-zA-Z]' , name ) :
message = 'make sure the name begins with a letter or underscore'
else :
message = 'use only numbers, letters and underscores'
raise CommandError ( "%r is not a valid %s name. Please %s." % ( name , app_or_project , message ) )
def download ( self , url ) :
def cleanup_url ( url ) :
tmp = url . rstrip ( '/' )
filename = tmp . split ( '/' ) [ - 1 ]
if url . endswith ( '/' ) :
display_url = tmp + '/'
else :
display_url = url
return filename , display_url
prefix = 'django_%s_template_' % self . app_or_project
tempdir = tempfile . mkdtemp ( prefix = prefix , suffix = '_download' )
self . paths_to_remove . append ( tempdir )
filename , display_url = cleanup_url ( url )
if self . verbosity >= 2 :
self . stdout . write ( "Downloading %s\n" % display_url )
try :
the_path , info = urlretrieve ( url , path . join ( tempdir , filename ) )
except IOError as e :
raise CommandError ( "couldn't download URL %s to %s: %s" % ( url , filename , e ) )
used_name = the_path . split ( '/' ) [ - 1 ]
content_disposition = info . get ( 'content-disposition' )
if content_disposition :
_ , params = cgi . parse_header ( content_disposition )
guessed_filename = params . get ( 'filename' ) or used_name
else :
guessed_filename = used_name
ext = self . splitext ( guessed_filename ) [ 1 ]
content_type = info . get ( 'content-type' )
if not ext and content_type :
ext = mimetypes . guess_extension ( content_type )
if ext :
guessed_filename += ext
if used_name != guessed_filename :
guessed_path = path . join ( tempdir , guessed_filename )
shutil . move ( the_path , guessed_path )
return guessed_path
return the_path
def splitext ( self , the_path ) :
base , ext = posixpath . splitext ( the_path )
if base . lower ( ) . endswith ( '.tar' ) :
ext = base [ - 4 : ] + ext
base = base [ : - 4 ]
return base , ext
def extract ( self , filename ) :
prefix = 'django_%s_template_' % self . app_or_project
tempdir = tempfile . mkdtemp ( prefix = prefix , suffix = '_extract' )
self . paths_to_remove . append ( tempdir )
if self . verbosity >= 2 :
self . stdout . write ( "Extracting %s\n" % filename )
try :
archive . extract ( filename , tempdir )
return tempdir
except ( archive . ArchiveException , IOError ) as e :
raise CommandError ( "couldn't extract file %s to %s: %s" % ( filename , tempdir , e ) )
def is_url ( self , template ) :
if ':' not in template :
return False
scheme = template . split ( ':' , 1 ) [ 0 ] . lower ( )
return scheme in self . url_schemes
def make_writeable ( self , filename ) :
if sys . platform . startswith ( 'java' ) :
return
if not os . access ( filename , os . W_OK ) :
st = os . stat ( filename )
new_permissions = stat . S_IMODE ( st . st_mode ) | stat . S_IWUSR
os . chmod ( filename , new_permissions )
from __future__ import unicode_literals
import os
from subprocess import PIPE , Popen
import sys
from django . utils . encoding import force_text , DEFAULT_LOCALE_ENCODING
from django . utils import six
from . base import CommandError
def popen_wrapper ( args , os_err_exc_type = CommandError ) :
try :
p = Popen ( args , shell = False , stdout = PIPE , stderr = PIPE , close_fds = os . name != 'nt' , universal_newlines = True )
except OSError as e :
strerror = force_text ( e . strerror , DEFAULT_LOCALE_ENCODING , strings_only = True )
six . reraise ( os_err_exc_type , os_err_exc_type ( 'Error executing %s: %s' % ( args [ 0 ] , strerror ) ) , sys . exc_info ( ) [ 2 ] )
output , errors = p . communicate ( )
return ( output , force_text ( errors , DEFAULT_LOCALE_ENCODING , strings_only = True ) , p . returncode )
def handle_extensions ( extensions = ( 'html' , ) , ignored = ( 'py' , ) ) :
ext_list = [ ]
for ext in extensions :
ext_list . extend ( ext . replace ( ' ' , '' ) . split ( ',' ) )
for i , ext in enumerate ( ext_list ) :
if not ext . startswith ( '.' ) :
ext_list [ i ] = '.%s' % ext_list [ i ]
return set ( x for x in ext_list if x . strip ( '.' ) not in ignored )
def find_command ( cmd , path = None , pathext = None ) :
if path is None :
path = os . environ . get ( 'PATH' , '' ) . split ( os . pathsep )
if isinstance ( path , six . string_types ) :
path = [ path ]
if pathext is None :
pathext = os . environ . get ( 'PATHEXT' , '.COM;.EXE;.BAT;.CMD' ) . split ( os . pathsep )
for ext in pathext :
if cmd . endswith ( ext ) :
pathext = [ '' ]
break
for p in path :
f = os . path . join ( p , cmd )
if os . path . isfile ( f ) :
return f
for ext in pathext :
fext = f + ext
if os . path . isfile ( fext ) :
return fext
return None
import collections
from math import ceil
from django . utils import six
class InvalidPage ( Exception ) :
pass
class PageNotAnInteger ( InvalidPage ) :
pass
class EmptyPage ( InvalidPage ) :
pass
class Paginator ( object ) :
def __init__ ( self , object_list , per_page , orphans = 0 , allow_empty_first_page = True ) :
self . object_list = object_list
self . per_page = int ( per_page )
self . orphans = int ( orphans )
self . allow_empty_first_page = allow_empty_first_page
self . _num_pages = self . _count = None
def validate_number ( self , number ) :
try :
number = int ( number )
except ( TypeError , ValueError ) :
raise PageNotAnInteger ( 'That page number is not an integer' )
if number < 1 :
raise EmptyPage ( 'That page number is less than 1' )
if number > self . num_pages :
if number == 1 and self . allow_empty_first_page :
pass
else :
raise EmptyPage ( 'That page contains no results' )
return number
def page ( self , number ) :
number = self . validate_number ( number )
bottom = ( number - 1 ) * self . per_page
top = bottom + self . per_page
if top + self . orphans >= self . count :
top = self . count
return self . _get_page ( self . object_list [ bottom : top ] , number , self )
def _get_page ( self , * args , ** kwargs ) :
return Page ( * args , ** kwargs )
def _get_count ( self ) :
if self . _count is None :
try :
self . _count = self . object_list . count ( )
except ( AttributeError , TypeError ) :
self . _count = len ( self . object_list )
return self . _count
count = property ( _get_count )
def _get_num_pages ( self ) :
if self . _num_pages is None :
if self . count == 0 and not self . allow_empty_first_page :
self . _num_pages = 0
else :
hits = max ( 1 , self . count - self . orphans )
self . _num_pages = int ( ceil ( hits / float ( self . per_page ) ) )
return self . _num_pages
num_pages = property ( _get_num_pages )
def _get_page_range ( self ) :
return range ( 1 , self . num_pages + 1 )
page_range = property ( _get_page_range )
QuerySetPaginator = Paginator
class Page ( collections . Sequence ) :
def __init__ ( self , object_list , number , paginator ) :
self . object_list = object_list
self . number = number
self . paginator = paginator
def __repr__ ( self ) :
return '' % ( self . number , self . paginator . num_pages )
def __len__ ( self ) :
return len ( self . object_list )
def __getitem__ ( self , index ) :
if not isinstance ( index , ( slice , ) + six . integer_types ) :
raise TypeError
if not isinstance ( self . object_list , list ) :
self . object_list = list ( self . object_list )
return self . object_list [ index ]
def has_next ( self ) :
return self . number < self . paginator . num_pages
def has_previous ( self ) :
return self . number > 1
def has_other_pages ( self ) :
return self . has_previous ( ) or self . has_next ( )
def next_page_number ( self ) :
return self . paginator . validate_number ( self . number + 1 )
def previous_page_number ( self ) :
return self . paginator . validate_number ( self . number - 1 )
def start_index ( self ) :
if self . paginator . count == 0 :
return 0
return ( self . paginator . per_page * ( self . number - 1 ) ) + 1
def end_index ( self ) :
if self . number == self . paginator . num_pages :
return self . paginator . count
return self . number * self . paginator . per_page
import importlib
from django . conf import settings
from django . utils import six
from django . core . serializers . base import SerializerDoesNotExist
BUILTIN_SERIALIZERS = { "xml" : "django.core.serializers.xml_serializer" , "python" : "django.core.serializers.python" , "json" : "django.core.serializers.json" , "yaml" : "django.core.serializers.pyyaml" , }
_serializers = { }
class BadSerializer ( object ) :
internal_use_only = False
def __init__ ( self , exception ) :
self . exception = exception
def __call__ ( self , * args , ** kwargs ) :
raise self . exception
def register_serializer ( format , serializer_module , serializers = None ) :
if serializers is None and not _serializers :
_load_serializers ( )
try :
module = importlib . import_module ( serializer_module )
except ImportError as exc :
bad_serializer = BadSerializer ( exc )
module = type ( 'BadSerializerModule' , ( object , ) , { 'Deserializer' : bad_serializer , 'Serializer' : bad_serializer , } )
if serializers is None :
_serializers [ format ] = module
else :
serializers [ format ] = module
def unregister_serializer ( format ) :
if not _serializers :
_load_serializers ( )
if format not in _serializers :
raise SerializerDoesNotExist ( format )
del _serializers [ format ]
def get_serializer ( format ) :
if not _serializers :
_load_serializers ( )
if format not in _serializers :
raise SerializerDoesNotExist ( format )
return _serializers [ format ] . Serializer
def get_serializer_formats ( ) :
if not _serializers :
_load_serializers ( )
return list ( _serializers )
def get_public_serializer_formats ( ) :
if not _serializers :
_load_serializers ( )
return [ k for k , v in six . iteritems ( _serializers ) if not v . Serializer . internal_use_only ]
def get_deserializer ( format ) :
if not _serializers :
_load_serializers ( )
if format not in _serializers :
raise SerializerDoesNotExist ( format )
return _serializers [ format ] . Deserializer
def serialize ( format , queryset , ** options ) :
s = get_serializer ( format ) ( )
s . serialize ( queryset , ** options )
return s . getvalue ( )
def deserialize ( format , stream_or_string , ** options ) :
d = get_deserializer ( format )
return d ( stream_or_string , ** options )
def _load_serializers ( ) :
global _serializers
serializers = { }
for format in BUILTIN_SERIALIZERS :
register_serializer ( format , BUILTIN_SERIALIZERS [ format ] , serializers )
if hasattr ( settings , "SERIALIZATION_MODULES" ) :
for format in settings . SERIALIZATION_MODULES :
register_serializer ( format , settings . SERIALIZATION_MODULES [ format ] , serializers )
_serializers = serializers
import warnings
from django . db import models
from django . utils import six
from django . utils . deprecation import RemovedInDjango19Warning
class SerializerDoesNotExist ( KeyError ) :
pass
class SerializationError ( Exception ) :
pass
class DeserializationError ( Exception ) :
pass
class Serializer ( object ) :
internal_use_only = False
def serialize ( self , queryset , ** options ) :
self . options = options
self . stream = options . pop ( "stream" , six . StringIO ( ) )
self . selected_fields = options . pop ( "fields" , None )
self . use_natural_keys = options . pop ( "use_natural_keys" , False )
if self . use_natural_keys :
warnings . warn ( "``use_natural_keys`` is deprecated; use ``use_natural_foreign_keys`` instead." , RemovedInDjango19Warning )
self . use_natural_foreign_keys = options . pop ( 'use_natural_foreign_keys' , False ) or self . use_natural_keys
self . use_natural_primary_keys = options . pop ( 'use_natural_primary_keys' , False )
self . start_serialization ( )
self . first = True
for obj in queryset :
self . start_object ( obj )
concrete_model = obj . _meta . concrete_model
for field in concrete_model . _meta . local_fields :
if field . serialize :
if field . rel is None :
if self . selected_fields is None or field . attname in self . selected_fields :
self . handle_field ( obj , field )
else :
if self . selected_fields is None or field . attname [ : - 3 ] in self . selected_fields :
self . handle_fk_field ( obj , field )
for field in concrete_model . _meta . many_to_many :
if field . serialize :
if self . selected_fields is None or field . attname in self . selected_fields :
self . handle_m2m_field ( obj , field )
self . end_object ( obj )
if self . first :
self . first = False
self . end_serialization ( )
return self . getvalue ( )
def start_serialization ( self ) :
raise NotImplementedError ( 'subclasses of Serializer must provide a start_serialization() method' )
def end_serialization ( self ) :
pass
def start_object ( self , obj ) :
raise NotImplementedError ( 'subclasses of Serializer must provide a start_object() method' )
def end_object ( self , obj ) :
pass
def handle_field ( self , obj , field ) :
raise NotImplementedError ( 'subclasses of Serializer must provide an handle_field() method' )
def handle_fk_field ( self , obj , field ) :
raise NotImplementedError ( 'subclasses of Serializer must provide an handle_fk_field() method' )
def handle_m2m_field ( self , obj , field ) :
raise NotImplementedError ( 'subclasses of Serializer must provide an handle_m2m_field() method' )
def getvalue ( self ) :
if callable ( getattr ( self . stream , 'getvalue' , None ) ) :
return self . stream . getvalue ( )
class Deserializer ( six . Iterator ) :
def __init__ ( self , stream_or_string , ** options ) :
self . options = options
if isinstance ( stream_or_string , six . string_types ) :
self . stream = six . StringIO ( stream_or_string )
else :
self . stream = stream_or_string
def __iter__ ( self ) :
return self
def __next__ ( self ) :
raise NotImplementedError ( 'subclasses of Deserializer must provide a __next__() method' )
class DeserializedObject ( object ) :
def __init__ ( self , obj , m2m_data = None ) :
self . object = obj
self . m2m_data = m2m_data
def __repr__ ( self ) :
return "" % ( self . object . _meta . app_label , self . object . _meta . object_name , self . object . pk )
def save ( self , save_m2m = True , using = None ) :
models . Model . save_base ( self . object , using = using , raw = True )
if self . m2m_data and save_m2m :
for accessor_name , object_list in self . m2m_data . items ( ) :
setattr ( self . object , accessor_name , object_list )
self . m2m_data = None
def build_instance ( Model , data , db ) :
obj = Model ( ** data )
if ( obj . pk is None and hasattr ( Model , 'natural_key' ) and hasattr ( Model . _default_manager , 'get_by_natural_key' ) ) :
natural_key = obj . natural_key ( )
try :
obj . pk = Model . _default_manager . db_manager ( db ) . get_by_natural_key ( * natural_key ) . pk
except Model . DoesNotExist :
pass
return obj
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime
import decimal
import json
import sys
from django . core . serializers . base import DeserializationError
from django . core . serializers . python import Serializer as PythonSerializer
from django . core . serializers . python import Deserializer as PythonDeserializer
from django . utils import six
from django . utils . timezone import is_aware
class Serializer ( PythonSerializer ) :
internal_use_only = False
def start_serialization ( self ) :
if json . __version__ . split ( '.' ) >= [ '2' , '1' , '3' ] :
self . options . update ( { 'use_decimal' : False } )
self . _current = None
self . json_kwargs = self . options . copy ( )
self . json_kwargs . pop ( 'stream' , None )
self . json_kwargs . pop ( 'fields' , None )
if self . options . get ( 'indent' ) :
self . json_kwargs [ 'separators' ] = ( ',' , ': ' )
self . stream . write ( "[" )
def end_serialization ( self ) :
if self . options . get ( "indent" ) :
self . stream . write ( "\n" )
self . stream . write ( "]" )
if self . options . get ( "indent" ) :
self . stream . write ( "\n" )
def end_object ( self , obj ) :
indent = self . options . get ( "indent" )
if not self . first :
self . stream . write ( "," )
if not indent :
self . stream . write ( " " )
if indent :
self . stream . write ( "\n" )
json . dump ( self . get_dump_object ( obj ) , self . stream , cls = DjangoJSONEncoder , ** self . json_kwargs )
self . _current = None
def getvalue ( self ) :
return super ( PythonSerializer , self ) . getvalue ( )
def Deserializer ( stream_or_string , ** options ) :
if not isinstance ( stream_or_string , ( bytes , six . string_types ) ) :
stream_or_string = stream_or_string . read ( )
if isinstance ( stream_or_string , bytes ) :
stream_or_string = stream_or_string . decode ( 'utf-8' )
try :
objects = json . loads ( stream_or_string )
for obj in PythonDeserializer ( objects , ** options ) :
yield obj
except GeneratorExit :
raise
except Exception as e :
six . reraise ( DeserializationError , DeserializationError ( e ) , sys . exc_info ( ) [ 2 ] )
class DjangoJSONEncoder ( json . JSONEncoder ) :
def default ( self , o ) :
if isinstance ( o , datetime . datetime ) :
r = o . isoformat ( )
if o . microsecond :
r = r [ : 23 ] + r [ 26 : ]
if r . endswith ( '+00:00' ) :
r = r [ : - 6 ] + 'Z'
return r
elif isinstance ( o , datetime . date ) :
return o . isoformat ( )
elif isinstance ( o , datetime . time ) :
if is_aware ( o ) :
raise ValueError ( "JSON can't represent timezone-aware times." )
r = o . isoformat ( )
if o . microsecond :
r = r [ : 12 ]
return r
elif isinstance ( o , decimal . Decimal ) :
return str ( o )
else :
return super ( DjangoJSONEncoder , self ) . default ( o )
DateTimeAwareJSONEncoder = DjangoJSONEncoder
from __future__ import unicode_literals
from django . apps import apps
from django . conf import settings
from django . core . serializers import base
from django . db import models , DEFAULT_DB_ALIAS
from django . utils . encoding import smart_text , is_protected_type
from django . utils import six
class Serializer ( base . Serializer ) :
internal_use_only = True
def start_serialization ( self ) :
self . _current = None
self . objects = [ ]
def end_serialization ( self ) :
pass
def start_object ( self , obj ) :
self . _current = { }
def end_object ( self , obj ) :
self . objects . append ( self . get_dump_object ( obj ) )
self . _current = None
def get_dump_object ( self , obj ) :
data = { "model" : smart_text ( obj . _meta ) , "fields" : self . _current , }
if not self . use_natural_primary_keys or not hasattr ( obj , 'natural_key' ) :
data [ "pk" ] = smart_text ( obj . _get_pk_val ( ) , strings_only = True )
return data
def handle_field ( self , obj , field ) :
value = field . _get_val_from_obj ( obj )
if is_protected_type ( value ) :
self . _current [ field . name ] = value
else :
self . _current [ field . name ] = field . value_to_string ( obj )
def handle_fk_field ( self , obj , field ) :
if self . use_natural_foreign_keys and hasattr ( field . rel . to , 'natural_key' ) :
related = getattr ( obj , field . name )
if related :
value = related . natural_key ( )
else :
value = None
else :
value = getattr ( obj , field . get_attname ( ) )
self . _current [ field . name ] = value
def handle_m2m_field ( self , obj , field ) :
if field . rel . through . _meta . auto_created :
if self . use_natural_foreign_keys and hasattr ( field . rel . to , 'natural_key' ) :
m2m_value = lambda value : value . natural_key ( )
else :
m2m_value = lambda value : smart_text ( value . _get_pk_val ( ) , strings_only = True )
self . _current [ field . name ] = [ m2m_value ( related ) for related in getattr ( obj , field . name ) . iterator ( ) ]
def getvalue ( self ) :
return self . objects
def Deserializer ( object_list , ** options ) :
db = options . pop ( 'using' , DEFAULT_DB_ALIAS )
ignore = options . pop ( 'ignorenonexistent' , False )
for d in object_list :
try :
Model = _get_model ( d [ "model" ] )
except base . DeserializationError :
if ignore :
continue
else :
raise
data = { }
if 'pk' in d :
data [ Model . _meta . pk . attname ] = Model . _meta . pk . to_python ( d . get ( "pk" , None ) )
m2m_data = { }
model_fields = Model . _meta . get_all_field_names ( )
for ( field_name , field_value ) in six . iteritems ( d [ "fields" ] ) :
if ignore and field_name not in model_fields :
continue
if isinstance ( field_value , str ) :
field_value = smart_text ( field_value , options . get ( "encoding" , settings . DEFAULT_CHARSET ) , strings_only = True )
field = Model . _meta . get_field ( field_name )
if field . rel and isinstance ( field . rel , models . ManyToManyRel ) :
if hasattr ( field . rel . to . _default_manager , 'get_by_natural_key' ) :
def m2m_convert ( value ) :
if hasattr ( value , '__iter__' ) and not isinstance ( value , six . text_type ) :
return field . rel . to . _default_manager . db_manager ( db ) . get_by_natural_key ( * value ) . pk
else :
return smart_text ( field . rel . to . _meta . pk . to_python ( value ) )
else :
m2m_convert = lambda v : smart_text ( field . rel . to . _meta . pk . to_python ( v ) )
m2m_data [ field . name ] = [ m2m_convert ( pk ) for pk in field_value ]
elif field . rel and isinstance ( field . rel , models . ManyToOneRel ) :
if field_value is not None :
if hasattr ( field . rel . to . _default_manager , 'get_by_natural_key' ) :
if hasattr ( field_value , '__iter__' ) and not isinstance ( field_value , six . text_type ) :
obj = field . rel . to . _default_manager . db_manager ( db ) . get_by_natural_key ( * field_value )
value = getattr ( obj , field . rel . field_name )
if field . rel . to . _meta . pk . rel :
value = value . pk
else :
value = field . rel . to . _meta . get_field ( field . rel . field_name ) . to_python ( field_value )
data [ field . attname ] = value
else :
data [ field . attname ] = field . rel . to . _meta . get_field ( field . rel . field_name ) . to_python ( field_value )
else :
data [ field . attname ] = None
else :
data [ field . name ] = field . to_python ( field_value )
obj = base . build_instance ( Model , data , db )
yield base . DeserializedObject ( obj , m2m_data )
def _get_model ( model_identifier ) :
try :
return apps . get_model ( model_identifier )
except ( LookupError , TypeError ) :
raise base . DeserializationError ( "Invalid model identifier: '%s'" % model_identifier )
import decimal
import yaml
import sys
from io import StringIO
from django . db import models
from django . core . serializers . base import DeserializationError
from django . core . serializers . python import Serializer as PythonSerializer
from django . core . serializers . python import Deserializer as PythonDeserializer
from django . utils import six
try :
from yaml import CSafeLoader as SafeLoader
from yaml import CSafeDumper as SafeDumper
except ImportError :
from yaml import SafeLoader , SafeDumper
class DjangoSafeDumper ( SafeDumper ) :
def represent_decimal ( self , data ) :
return self . represent_scalar ( 'tag:yaml.org,2002:str' , str ( data ) )
DjangoSafeDumper . add_representer ( decimal . Decimal , DjangoSafeDumper . represent_decimal )
class Serializer ( PythonSerializer ) :
internal_use_only = False
def handle_field ( self , obj , field ) :
if isinstance ( field , models . TimeField ) and getattr ( obj , field . name ) is not None :
self . _current [ field . name ] = str ( getattr ( obj , field . name ) )
else :
super ( Serializer , self ) . handle_field ( obj , field )
def end_serialization ( self ) :
yaml . dump ( self . objects , self . stream , Dumper = DjangoSafeDumper , ** self . options )
def getvalue ( self ) :
return super ( PythonSerializer , self ) . getvalue ( )
def Deserializer ( stream_or_string , ** options ) :
if isinstance ( stream_or_string , bytes ) :
stream_or_string = stream_or_string . decode ( 'utf-8' )
if isinstance ( stream_or_string , six . string_types ) :
stream = StringIO ( stream_or_string )
else :
stream = stream_or_string
try :
for obj in PythonDeserializer ( yaml . load ( stream , Loader = SafeLoader ) , ** options ) :
yield obj
except GeneratorExit :
raise
except Exception as e :
six . reraise ( DeserializationError , DeserializationError ( e ) , sys . exc_info ( ) [ 2 ] )
from __future__ import unicode_literals
from django . apps import apps
from django . conf import settings
from django . core . serializers import base
from django . db import models , DEFAULT_DB_ALIAS
from django . utils . xmlutils import SimplerXMLGenerator
from django . utils . encoding import smart_text
from xml . dom import pulldom
from xml . sax import handler
from xml . sax . expatreader import ExpatParser as _ExpatParser
class Serializer ( base . Serializer ) :
def indent ( self , level ) :
if self . options . get ( 'indent' , None ) is not None :
self . xml . ignorableWhitespace ( '\n' + ' ' * self . options . get ( 'indent' , None ) * level )
def start_serialization ( self ) :
self . xml = SimplerXMLGenerator ( self . stream , self . options . get ( "encoding" , settings . DEFAULT_CHARSET ) )
self . xml . startDocument ( )
self . xml . startElement ( "django-objects" , { "version" : "1.0" } )
def end_serialization ( self ) :
self . indent ( 0 )
self . xml . endElement ( "django-objects" )
self . xml . endDocument ( )
def start_object ( self , obj ) :
if not hasattr ( obj , "_meta" ) :
raise base . SerializationError ( "Non-model object (%s) encountered during serialization" % type ( obj ) )
self . indent ( 1 )
attrs = { "model" : smart_text ( obj . _meta ) }
if not self . use_natural_primary_keys or not hasattr ( obj , 'natural_key' ) :
obj_pk = obj . _get_pk_val ( )
if obj_pk is not None :
attrs [ 'pk' ] = smart_text ( obj_pk )
self . xml . startElement ( "object" , attrs )
def end_object ( self , obj ) :
self . indent ( 1 )
self . xml . endElement ( "object" )
def handle_field ( self , obj , field ) :
self . indent ( 2 )
self . xml . startElement ( "field" , { "name" : field . name , "type" : field . get_internal_type ( ) } )
if getattr ( obj , field . name ) is not None :
self . xml . characters ( field . value_to_string ( obj ) )
else :
self . xml . addQuickElement ( "None" )
self . xml . endElement ( "field" )
def handle_fk_field ( self , obj , field ) :
self . _start_relational_field ( field )
related_att = getattr ( obj , field . get_attname ( ) )
if related_att is not None :
if self . use_natural_foreign_keys and hasattr ( field . rel . to , 'natural_key' ) :
related = getattr ( obj , field . name )
related = related . natural_key ( )
for key_value in related :
self . xml . startElement ( "natural" , { } )
self . xml . characters ( smart_text ( key_value ) )
self . xml . endElement ( "natural" )
else :
self . xml . characters ( smart_text ( related_att ) )
else :
self . xml . addQuickElement ( "None" )
self . xml . endElement ( "field" )
def handle_m2m_field ( self , obj , field ) :
if field . rel . through . _meta . auto_created :
self . _start_relational_field ( field )
if self . use_natural_foreign_keys and hasattr ( field . rel . to , 'natural_key' ) :
def handle_m2m ( value ) :
natural = value . natural_key ( )
self . xml . startElement ( "object" , { } )
for key_value in natural :
self . xml . startElement ( "natural" , { } )
self . xml . characters ( smart_text ( key_value ) )
self . xml . endElement ( "natural" )
self . xml . endElement ( "object" )
else :
def handle_m2m ( value ) :
self . xml . addQuickElement ( "object" , attrs = { 'pk' : smart_text ( value . _get_pk_val ( ) ) } )
for relobj in getattr ( obj , field . name ) . iterator ( ) :
handle_m2m ( relobj )
self . xml . endElement ( "field" )
def _start_relational_field ( self , field ) :
self . indent ( 2 )
self . xml . startElement ( "field" , { "name" : field . name , "rel" : field . rel . __class__ . __name__ , "to" : smart_text ( field . rel . to . _meta ) , } )
class Deserializer ( base . Deserializer ) :
def __init__ ( self , stream_or_string , ** options ) :
super ( Deserializer , self ) . __init__ ( stream_or_string , ** options )
self . event_stream = pulldom . parse ( self . stream , self . _make_parser ( ) )
self . db = options . pop ( 'using' , DEFAULT_DB_ALIAS )
self . ignore = options . pop ( 'ignorenonexistent' , False )
def _make_parser ( self ) :
return DefusedExpatParser ( )
def __next__ ( self ) :
for event , node in self . event_stream :
if event == "START_ELEMENT" and node . nodeName == "object" :
self . event_stream . expandNode ( node )
return self . _handle_object ( node )
raise StopIteration
def _handle_object ( self , node ) :
Model = self . _get_model_from_node ( node , "model" )
data = { }
if node . hasAttribute ( 'pk' ) :
data [ Model . _meta . pk . attname ] = Model . _meta . pk . to_python ( node . getAttribute ( 'pk' ) )
m2m_data = { }
model_fields = Model . _meta . get_all_field_names ( )
for field_node in node . getElementsByTagName ( "field" ) :
field_name = field_node . getAttribute ( "name" )
if not field_name :
raise base . DeserializationError ( " node is missing the 'name' attribute" )
if self . ignore and field_name not in model_fields :
continue
field = Model . _meta . get_field ( field_name )
if field . rel and isinstance ( field . rel , models . ManyToManyRel ) :
m2m_data [ field . name ] = self . _handle_m2m_field_node ( field_node , field )
elif field . rel and isinstance ( field . rel , models . ManyToOneRel ) :
data [ field . attname ] = self . _handle_fk_field_node ( field_node , field )
else :
if field_node . getElementsByTagName ( 'None' ) :
value = None
else :
value = field . to_python ( getInnerText ( field_node ) . strip ( ) )
data [ field . name ] = value
obj = base . build_instance ( Model , data , self . db )
return base . DeserializedObject ( obj , m2m_data )
def _handle_fk_field_node ( self , node , field ) :
if node . getElementsByTagName ( 'None' ) :
return None
else :
if hasattr ( field . rel . to . _default_manager , 'get_by_natural_key' ) :
keys = node . getElementsByTagName ( 'natural' )
if keys :
field_value = [ getInnerText ( k ) . strip ( ) for k in keys ]
obj = field . rel . to . _default_manager . db_manager ( self . db ) . get_by_natural_key ( * field_value )
obj_pk = getattr ( obj , field . rel . field_name )
if field . rel . to . _meta . pk . rel :
obj_pk = obj_pk . pk
else :
field_value = getInnerText ( node ) . strip ( )
obj_pk = field . rel . to . _meta . get_field ( field . rel . field_name ) . to_python ( field_value )
return obj_pk
else :
field_value = getInnerText ( node ) . strip ( )
return field . rel . to . _meta . get_field ( field . rel . field_name ) . to_python ( field_value )
def _handle_m2m_field_node ( self , node , field ) :
if hasattr ( field . rel . to . _default_manager , 'get_by_natural_key' ) :
def m2m_convert ( n ) :
keys = n . getElementsByTagName ( 'natural' )
if keys :
field_value = [ getInnerText ( k ) . strip ( ) for k in keys ]
obj_pk = field . rel . to . _default_manager . db_manager ( self . db ) . get_by_natural_key ( * field_value ) . pk
else :
obj_pk = field . rel . to . _meta . pk . to_python ( n . getAttribute ( 'pk' ) )
return obj_pk
else :
m2m_convert = lambda n : field . rel . to . _meta . pk . to_python ( n . getAttribute ( 'pk' ) )
return [ m2m_convert ( c ) for c in node . getElementsByTagName ( "object" ) ]
def _get_model_from_node ( self , node , attr ) :
model_identifier = node . getAttribute ( attr )
if not model_identifier :
raise base . DeserializationError ( "<%s> node is missing the required '%s' attribute" % ( node . nodeName , attr ) )
try :
return apps . get_model ( model_identifier )
except ( LookupError , TypeError ) :
raise base . DeserializationError ( "<%s> node has invalid model identifier: '%s'" % ( node . nodeName , model_identifier ) )
def getInnerText ( node ) :
inner_text = [ ]
for child in node . childNodes :
if child . nodeType == child . TEXT_NODE or child . nodeType == child . CDATA_SECTION_NODE :
inner_text . append ( child . data )
elif child . nodeType == child . ELEMENT_NODE :
inner_text . extend ( getInnerText ( child ) )
else :
pass
return "" . join ( inner_text )
class DefusedExpatParser ( _ExpatParser ) :
def __init__ ( self , * args , ** kwargs ) :
_ExpatParser . __init__ ( self , * args , ** kwargs )
self . setFeature ( handler . feature_external_ges , False )
self . setFeature ( handler . feature_external_pes , False )
def start_doctype_decl ( self , name , sysid , pubid , has_internal_subset ) :
raise DTDForbidden ( name , sysid , pubid )
def entity_decl ( self , name , is_parameter_entity , value , base , sysid , pubid , notation_name ) :
raise EntitiesForbidden ( name , value , base , sysid , pubid , notation_name )
def unparsed_entity_decl ( self , name , base , sysid , pubid , notation_name ) :
raise EntitiesForbidden ( name , None , base , sysid , pubid , notation_name )
def external_entity_ref_handler ( self , context , base , sysid , pubid ) :
raise ExternalReferenceForbidden ( context , base , sysid , pubid )
def reset ( self ) :
_ExpatParser . reset ( self )
parser = self . _parser
parser . StartDoctypeDeclHandler = self . start_doctype_decl
parser . EntityDeclHandler = self . entity_decl
parser . UnparsedEntityDeclHandler = self . unparsed_entity_decl
parser . ExternalEntityRefHandler = self . external_entity_ref_handler
class DefusedXmlException ( ValueError ) :
def __repr__ ( self ) :
return str ( self )
class DTDForbidden ( DefusedXmlException ) :
def __init__ ( self , name , sysid , pubid ) :
super ( DTDForbidden , self ) . __init__ ( )
self . name = name
self . sysid = sysid
self . pubid = pubid
def __str__ ( self ) :
tpl = "DTDForbidden(name='{}', system_id={!r}, public_id={!r})"
return tpl . format ( self . name , self . sysid , self . pubid )
class EntitiesForbidden ( DefusedXmlException ) :
def __init__ ( self , name , value , base , sysid , pubid , notation_name ) :
super ( EntitiesForbidden , self ) . __init__ ( )
self . name = name
self . value = value
self . base = base
self . sysid = sysid
self . pubid = pubid
self . notation_name = notation_name
def __str__ ( self ) :
tpl = "EntitiesForbidden(name='{}', system_id={!r}, public_id={!r})"
return tpl . format ( self . name , self . sysid , self . pubid )
class ExternalReferenceForbidden ( DefusedXmlException ) :
def __init__ ( self , context , base , sysid , pubid ) :
super ( ExternalReferenceForbidden , self ) . __init__ ( )
self . context = context
self . base = base
self . sysid = sysid
self . pubid = pubid
def __str__ ( self ) :
tpl = "ExternalReferenceForbidden(system_id='{}', public_id={})"
return tpl . format ( self . sysid , self . pubid )
from __future__ import unicode_literals
import socket
import sys
from wsgiref import simple_server
from wsgiref . util import FileWrapper
from django . core . exceptions import ImproperlyConfigured
from django . core . management . color import color_style
from django . core . wsgi import get_wsgi_application
from django . utils import six
from django . utils . module_loading import import_string
from django . utils . six . moves import socketserver
__all__ = ( 'WSGIServer' , 'WSGIRequestHandler' )
def get_internal_wsgi_application ( ) :
from django . conf import settings
app_path = getattr ( settings , 'WSGI_APPLICATION' )
if app_path is None :
return get_wsgi_application ( )
try :
return import_string ( app_path )
except ImportError as e :
msg = ( "WSGI application '%(app_path)s' could not be loaded; " "Error importing module: '%(exception)s'" % ( { 'app_path' : app_path , 'exception' : e , } ) )
six . reraise ( ImproperlyConfigured , ImproperlyConfigured ( msg ) , sys . exc_info ( ) [ 2 ] )
class WSGIServer ( simple_server . WSGIServer , object ) :
request_queue_size = 10
def __init__ ( self , * args , ** kwargs ) :
if kwargs . pop ( 'ipv6' , False ) :
self . address_family = socket . AF_INET6
super ( WSGIServer , self ) . __init__ ( * args , ** kwargs )
def server_bind ( self ) :
super ( WSGIServer , self ) . server_bind ( )
self . setup_environ ( )
class WSGIRequestHandler ( simple_server . WSGIRequestHandler , object ) :
def __init__ ( self , * args , ** kwargs ) :
self . style = color_style ( )
super ( WSGIRequestHandler , self ) . __init__ ( * args , ** kwargs )
def address_string ( self ) :
return self . client_address [ 0 ]
def log_message ( self , format , * args ) :
msg = "[%s] %s\n" % ( self . log_date_time_string ( ) , format % args )
if args [ 1 ] [ 0 ] == '2' :
msg = self . style . HTTP_SUCCESS ( msg )
elif args [ 1 ] [ 0 ] == '1' :
msg = self . style . HTTP_INFO ( msg )
elif args [ 1 ] == '304' :
msg = self . style . HTTP_NOT_MODIFIED ( msg )
elif args [ 1 ] [ 0 ] == '3' :
msg = self . style . HTTP_REDIRECT ( msg )
elif args [ 1 ] == '404' :
msg = self . style . HTTP_NOT_FOUND ( msg )
elif args [ 1 ] [ 0 ] == '4' :
msg = self . style . HTTP_BAD_REQUEST ( msg )
else :
msg = self . style . HTTP_SERVER_ERROR ( msg )
sys . stderr . write ( msg )
def run ( addr , port , wsgi_handler , ipv6 = False , threading = False ) :
server_address = ( addr , port )
if threading :
httpd_cls = type ( str ( 'WSGIServer' ) , ( socketserver . ThreadingMixIn , WSGIServer ) , { } )
else :
httpd_cls = WSGIServer
httpd = httpd_cls ( server_address , WSGIRequestHandler , ipv6 = ipv6 )
if threading :
httpd . daemon_threads = True
httpd . set_app ( wsgi_handler )
httpd . serve_forever ( )
import importlib
import os
import sys
__version__ = "0.1"
__all__ = [ "runfastcgi" ]
FASTCGI_OPTIONS = { 'protocol' : 'fcgi' , 'host' : None , 'port' : None , 'socket' : None , 'method' : 'fork' , 'daemonize' : None , 'workdir' : '/' , 'pidfile' : None , 'maxspare' : 5 , 'minspare' : 2 , 'maxchildren' : 50 , 'maxrequests' : 0 , 'debug' : None , 'outlog' : None , 'errlog' : None , 'umask' : None , }
def fastcgi_help ( message = None ) :
print ( FASTCGI_HELP )
if message :
print ( message )
return False
def runfastcgi ( argset = [ ] , ** kwargs ) :
options = FASTCGI_OPTIONS . copy ( )
options . update ( kwargs )
for x in argset :
if "=" in x :
k , v = x . split ( '=' , 1 )
else :
k , v = x , True
options [ k . lower ( ) ] = v
if "help" in options :
return fastcgi_help ( )
try :
import flup
except ImportError as e :
sys . stderr . write ( "ERROR: %s\n" % e )
sys . stderr . write ( " Unable to load the flup package. In order to run django\n" )
sys . stderr . write ( " as a FastCGI application, you will need to get flup from\n" )
sys . stderr . write ( " http://www.saddi.com/software/flup/ If you've already\n" )
sys . stderr . write ( " installed flup, then make sure you have it in your PYTHONPATH.\n" )
return False
flup_module = 'server.' + options [ 'protocol' ]
if options [ 'method' ] in ( 'prefork' , 'fork' ) :
wsgi_opts = { 'maxSpare' : int ( options [ "maxspare" ] ) , 'minSpare' : int ( options [ "minspare" ] ) , 'maxChildren' : int ( options [ "maxchildren" ] ) , 'maxRequests' : int ( options [ "maxrequests" ] ) , }
flup_module += '_fork'
elif options [ 'method' ] in ( 'thread' , 'threaded' ) :
wsgi_opts = { 'maxSpare' : int ( options [ "maxspare" ] ) , 'minSpare' : int ( options [ "minspare" ] ) , 'maxThreads' : int ( options [ "maxchildren" ] ) , }
else :
return fastcgi_help ( "ERROR: Implementation must be one of prefork or " "thread." )
wsgi_opts [ 'debug' ] = options [ 'debug' ] is not None
try :
module = importlib . import_module ( '.%s' % flup_module , 'flup' )
WSGIServer = module . WSGIServer
except Exception :
print ( "Can't import flup." + flup_module )
return False
from django . core . servers . basehttp import get_internal_wsgi_application
if options [ "host" ] and options [ "port" ] and not options [ "socket" ] :
wsgi_opts [ 'bindAddress' ] = ( options [ "host" ] , int ( options [ "port" ] ) )
elif options [ "socket" ] and not options [ "host" ] and not options [ "port" ] :
wsgi_opts [ 'bindAddress' ] = options [ "socket" ]
elif not options [ "socket" ] and not options [ "host" ] and not options [ "port" ] :
wsgi_opts [ 'bindAddress' ] = None
else :
return fastcgi_help ( "Invalid combination of host, port, socket." )
if options [ "daemonize" ] is None :
daemonize = ( wsgi_opts [ 'bindAddress' ] is not None )
else :
if options [ "daemonize" ] . lower ( ) in ( 'true' , 'yes' , 't' ) :
daemonize = True
elif options [ "daemonize" ] . lower ( ) in ( 'false' , 'no' , 'f' ) :
daemonize = False
else :
return fastcgi_help ( "ERROR: Invalid option for daemonize " "parameter." )
daemon_kwargs = { }
if options [ 'outlog' ] :
daemon_kwargs [ 'out_log' ] = options [ 'outlog' ]
if options [ 'errlog' ] :
daemon_kwargs [ 'err_log' ] = options [ 'errlog' ]
if options [ 'umask' ] :
daemon_kwargs [ 'umask' ] = int ( options [ 'umask' ] , 8 )
if daemonize :
from django . utils . daemonize import become_daemon
become_daemon ( our_home_dir = options [ "workdir" ] , ** daemon_kwargs )
if options [ "pidfile" ] :
with open ( options [ "pidfile" ] , "w" ) as fp :
fp . write ( "%d\n" % os . getpid ( ) )
WSGIServer ( get_internal_wsgi_application ( ) , ** wsgi_opts ) . run ( )
if __name__ == '__main__' :
runfastcgi ( sys . argv [ 1 : ] )
from django . dispatch import Signal
request_started = Signal ( )
request_finished = Signal ( )
got_request_exception = Signal ( providing_args = [ "request" ] )
from __future__ import unicode_literals
import base64
import json
import time
import zlib
from django . conf import settings
from django . utils import baseconv
from django . utils . crypto import constant_time_compare , salted_hmac
from django . utils . encoding import force_bytes , force_str , force_text
from django . utils . module_loading import import_string
class BadSignature ( Exception ) :
pass
class SignatureExpired ( BadSignature ) :
pass
def b64_encode ( s ) :
return base64 . urlsafe_b64encode ( s ) . strip ( b'=' )
def b64_decode ( s ) :
pad = b'=' * ( - len ( s ) % 4 )
return base64 . urlsafe_b64decode ( s + pad )
def base64_hmac ( salt , value , key ) :
return b64_encode ( salted_hmac ( salt , value , key ) . digest ( ) )
def get_cookie_signer ( salt = 'django.core.signing.get_cookie_signer' ) :
Signer = import_string ( settings . SIGNING_BACKEND )
key = force_bytes ( settings . SECRET_KEY )
return Signer ( b'django.http.cookies' + key , salt = salt )
class JSONSerializer ( object ) :
def dumps ( self , obj ) :
return json . dumps ( obj , separators = ( ',' , ':' ) ) . encode ( 'latin-1' )
def loads ( self , data ) :
return json . loads ( data . decode ( 'latin-1' ) )
def dumps ( obj , key = None , salt = 'django.core.signing' , serializer = JSONSerializer , compress = False ) :
data = serializer ( ) . dumps ( obj )
is_compressed = False
if compress :
compressed = zlib . compress ( data )
if len ( compressed ) < ( len ( data ) - 1 ) :
data = compressed
is_compressed = True
base64d = b64_encode ( data )
if is_compressed :
base64d = b'.' + base64d
return TimestampSigner ( key , salt = salt ) . sign ( base64d )
def loads ( s , key = None , salt = 'django.core.signing' , serializer = JSONSerializer , max_age = None ) :
base64d = force_bytes ( TimestampSigner ( key , salt = salt ) . unsign ( s , max_age = max_age ) )
decompress = False
if base64d [ : 1 ] == b'.' :
base64d = base64d [ 1 : ]
decompress = True
data = b64_decode ( base64d )
if decompress :
data = zlib . decompress ( data )
return serializer ( ) . loads ( data )
class Signer ( object ) :
def __init__ ( self , key = None , sep = ':' , salt = None ) :
self . sep = force_str ( sep )
self . key = key or settings . SECRET_KEY
self . salt = force_str ( salt or '%s.%s' % ( self . __class__ . __module__ , self . __class__ . __name__ ) )
def signature ( self , value ) :
signature = base64_hmac ( self . salt + 'signer' , value , self . key )
return force_str ( signature )
def sign ( self , value ) :
value = force_str ( value )
return str ( '%s%s%s' ) % ( value , self . sep , self . signature ( value ) )
def unsign ( self , signed_value ) :
signed_value = force_str ( signed_value )
if self . sep not in signed_value :
raise BadSignature ( 'No "%s" found in value' % self . sep )
value , sig = signed_value . rsplit ( self . sep , 1 )
if constant_time_compare ( sig , self . signature ( value ) ) :
return force_text ( value )
raise BadSignature ( 'Signature "%s" does not match' % sig )
class TimestampSigner ( Signer ) :
def timestamp ( self ) :
return baseconv . base62 . encode ( int ( time . time ( ) ) )
def sign ( self , value ) :
value = force_str ( value )
value = str ( '%s%s%s' ) % ( value , self . sep , self . timestamp ( ) )
return super ( TimestampSigner , self ) . sign ( value )
def unsign ( self , value , max_age = None ) :
result = super ( TimestampSigner , self ) . unsign ( value )
value , timestamp = result . rsplit ( self . sep , 1 )
timestamp = baseconv . base62 . decode ( timestamp )
if max_age is not None :
age = time . time ( ) - timestamp
if age > max_age :
raise SignatureExpired ( 'Signature age %s > %s seconds' % ( age , max_age ) )
return value
from __future__ import unicode_literals
import functools
from importlib import import_module
import re
from threading import local
import warnings
from django . http import Http404
from django . core . exceptions import ImproperlyConfigured , ViewDoesNotExist
from django . utils . datastructures import MultiValueDict
from django . utils . deprecation import RemovedInDjango20Warning
from django . utils . encoding import force_str , force_text , iri_to_uri
from django . utils . functional import lazy
from django . utils . http import urlquote
from django . utils . module_loading import module_has_submodule
from django . utils . regex_helper import normalize
from django . utils import six , lru_cache
from django . utils . translation import get_language
_prefixes = local ( )
_urlconfs = local ( )
class ResolverMatch ( object ) :
def __init__ ( self , func , args , kwargs , url_name = None , app_name = None , namespaces = None ) :
self . func = func
self . args = args
self . kwargs = kwargs
self . url_name = url_name
self . app_name = app_name
if namespaces :
self . namespaces = [ x for x in namespaces if x ]
else :
self . namespaces = [ ]
self . namespace = ':' . join ( self . namespaces )
if not hasattr ( func , '__name__' ) :
self . _func_path = '.' . join ( [ func . __class__ . __module__ , func . __class__ . __name__ ] )
else :
self . _func_path = '.' . join ( [ func . __module__ , func . __name__ ] )
view_path = url_name or self . _func_path
self . view_name = ':' . join ( self . namespaces + [ view_path ] )
def __getitem__ ( self , index ) :
return ( self . func , self . args , self . kwargs ) [ index ]
def __repr__ ( self ) :
return "ResolverMatch(func=%s, args=%s, kwargs=%s, url_name=%s, app_name=%s, namespaces=%s)" % ( self . _func_path , self . args , self . kwargs , self . url_name , self . app_name , self . namespaces )
class Resolver404 ( Http404 ) :
pass
class NoReverseMatch ( Exception ) :
pass
@ lru_cache . lru_cache ( maxsize = None )
def get_callable ( lookup_view , can_fail = False ) :
if callable ( lookup_view ) :
return lookup_view
mod_name , func_name = get_mod_func ( lookup_view )
if not func_name :
if can_fail :
return lookup_view
else :
raise ImportError ( "Could not import '%s'. The path must be fully qualified." % lookup_view )
try :
mod = import_module ( mod_name )
except ImportError :
if can_fail :
return lookup_view
else :
parentmod , submod = get_mod_func ( mod_name )
if submod and not module_has_submodule ( import_module ( parentmod ) , submod ) :
raise ViewDoesNotExist ( "Could not import '%s'. Parent module %s does not exist." % ( lookup_view , mod_name ) )
else :
raise
else :
try :
view_func = getattr ( mod , func_name )
except AttributeError :
if can_fail :
return lookup_view
else :
raise ViewDoesNotExist ( "Could not import '%s'. View does not exist in module %s." % ( lookup_view , mod_name ) )
else :
if not callable ( view_func ) :
raise ViewDoesNotExist ( "Could not import '%s.%s'. View is not callable." % ( mod_name , func_name ) )
return view_func
@ lru_cache . lru_cache ( maxsize = None )
def get_resolver ( urlconf ) :
if urlconf is None :
from django . conf import settings
urlconf = settings . ROOT_URLCONF
return RegexURLResolver ( r'^/' , urlconf )
@ lru_cache . lru_cache ( maxsize = None )
def get_ns_resolver ( ns_pattern , resolver ) :
ns_resolver = RegexURLResolver ( ns_pattern , resolver . url_patterns )
return RegexURLResolver ( r'^/' , [ ns_resolver ] )
def get_mod_func ( callback ) :
try :
dot = callback . rindex ( '.' )
except ValueError :
return callback , ''
return callback [ : dot ] , callback [ dot + 1 : ]
class LocaleRegexProvider ( object ) :
def __init__ ( self , regex ) :
self . _regex = regex
self . _regex_dict = { }
@ property
def regex ( self ) :
language_code = get_language ( )
if language_code not in self . _regex_dict :
if isinstance ( self . _regex , six . string_types ) :
regex = self . _regex
else :
regex = force_text ( self . _regex )
try :
compiled_regex = re . compile ( regex , re . UNICODE )
except re . error as e :
raise ImproperlyConfigured ( '"%s" is not a valid regular expression: %s' % ( regex , six . text_type ( e ) ) )
self . _regex_dict [ language_code ] = compiled_regex
return self . _regex_dict [ language_code ]
class RegexURLPattern ( LocaleRegexProvider ) :
def __init__ ( self , regex , callback , default_args = None , name = None ) :
LocaleRegexProvider . __init__ ( self , regex )
if callable ( callback ) :
self . _callback = callback
else :
self . _callback = None
self . _callback_str = callback
self . default_args = default_args or { }
self . name = name
def __repr__ ( self ) :
return force_str ( '<%s %s %s>' % ( self . __class__ . __name__ , self . name , self . regex . pattern ) )
def add_prefix ( self , prefix ) :
if not prefix or not hasattr ( self , '_callback_str' ) :
return
self . _callback_str = prefix + '.' + self . _callback_str
def resolve ( self , path ) :
match = self . regex . search ( path )
if match :
kwargs = match . groupdict ( )
if kwargs :
args = ( )
else :
args = match . groups ( )
kwargs . update ( self . default_args )
return ResolverMatch ( self . callback , args , kwargs , self . name )
@ property
def callback ( self ) :
if self . _callback is not None :
return self . _callback
self . _callback = get_callable ( self . _callback_str )
return self . _callback
class RegexURLResolver ( LocaleRegexProvider ) :
def __init__ ( self , regex , urlconf_name , default_kwargs = None , app_name = None , namespace = None ) :
LocaleRegexProvider . __init__ ( self , regex )
self . urlconf_name = urlconf_name
if not isinstance ( urlconf_name , six . string_types ) :
self . _urlconf_module = self . urlconf_name
self . callback = None
self . default_kwargs = default_kwargs or { }
self . namespace = namespace
self . app_name = app_name
self . _reverse_dict = { }
self . _namespace_dict = { }
self . _app_dict = { }
self . _callback_strs = set ( )
self . _populated = False
def __repr__ ( self ) :
if isinstance ( self . urlconf_name , list ) and len ( self . urlconf_name ) :
urlconf_repr = '<%s list>' % self . urlconf_name [ 0 ] . __class__ . __name__
else :
urlconf_repr = repr ( self . urlconf_name )
return str ( '<%s %s (%s:%s) %s>' ) % ( self . __class__ . __name__ , urlconf_repr , self . app_name , self . namespace , self . regex . pattern )
def _populate ( self ) :
lookups = MultiValueDict ( )
namespaces = { }
apps = { }
language_code = get_language ( )
for pattern in reversed ( self . url_patterns ) :
if hasattr ( pattern , '_callback_str' ) :
self . _callback_strs . add ( pattern . _callback_str )
elif hasattr ( pattern , '_callback' ) :
callback = pattern . _callback
if isinstance ( callback , functools . partial ) :
callback = callback . func
if not hasattr ( callback , '__name__' ) :
lookup_str = callback . __module__ + "." + callback . __class__ . __name__
else :
lookup_str = callback . __module__ + "." + callback . __name__
self . _callback_strs . add ( lookup_str )
p_pattern = pattern . regex . pattern
if p_pattern . startswith ( '^' ) :
p_pattern = p_pattern [ 1 : ]
if isinstance ( pattern , RegexURLResolver ) :
if pattern . namespace :
namespaces [ pattern . namespace ] = ( p_pattern , pattern )
if pattern . app_name :
apps . setdefault ( pattern . app_name , [ ] ) . append ( pattern . namespace )
else :
parent_pat = pattern . regex . pattern
for name in pattern . reverse_dict :
for matches , pat , defaults in pattern . reverse_dict . getlist ( name ) :
new_matches = normalize ( parent_pat + pat )
lookups . appendlist ( name , ( new_matches , p_pattern + pat , dict ( defaults , ** pattern . default_kwargs ) ) )
for namespace , ( prefix , sub_pattern ) in pattern . namespace_dict . items ( ) :
namespaces [ namespace ] = ( p_pattern + prefix , sub_pattern )
for app_name , namespace_list in pattern . app_dict . items ( ) :
apps . setdefault ( app_name , [ ] ) . extend ( namespace_list )
self . _callback_strs . update ( pattern . _callback_strs )
else :
bits = normalize ( p_pattern )
lookups . appendlist ( pattern . callback , ( bits , p_pattern , pattern . default_args ) )
if pattern . name is not None :
lookups . appendlist ( pattern . name , ( bits , p_pattern , pattern . default_args ) )
self . _reverse_dict [ language_code ] = lookups
self . _namespace_dict [ language_code ] = namespaces
self . _app_dict [ language_code ] = apps
self . _populated = True
@ property
def reverse_dict ( self ) :
language_code = get_language ( )
if language_code not in self . _reverse_dict :
self . _populate ( )
return self . _reverse_dict [ language_code ]
@ property
def namespace_dict ( self ) :
language_code = get_language ( )
if language_code not in self . _namespace_dict :
self . _populate ( )
return self . _namespace_dict [ language_code ]
@ property
def app_dict ( self ) :
language_code = get_language ( )
if language_code not in self . _app_dict :
self . _populate ( )
return self . _app_dict [ language_code ]
def resolve ( self , path ) :
path = force_text ( path )
tried = [ ]
match = self . regex . search ( path )
if match :
new_path = path [ match . end ( ) : ]
for pattern in self . url_patterns :
try :
sub_match = pattern . resolve ( new_path )
except Resolver404 as e :
sub_tried = e . args [ 0 ] . get ( 'tried' )
if sub_tried is not None :
tried . extend ( [ pattern ] + t for t in sub_tried )
else :
tried . append ( [ pattern ] )
else :
if sub_match :
sub_match_dict = dict ( match . groupdict ( ) , ** self . default_kwargs )
sub_match_dict . update ( sub_match . kwargs )
return ResolverMatch ( sub_match . func , sub_match . args , sub_match_dict , sub_match . url_name , self . app_name or sub_match . app_name , [ self . namespace ] + sub_match . namespaces )
tried . append ( [ pattern ] )
raise Resolver404 ( { 'tried' : tried , 'path' : new_path } )
raise Resolver404 ( { 'path' : path } )
@ property
def urlconf_module ( self ) :
try :
return self . _urlconf_module
except AttributeError :
self . _urlconf_module = import_module ( self . urlconf_name )
return self . _urlconf_module
@ property
def url_patterns ( self ) :
patterns = getattr ( self . urlconf_module , "urlpatterns" , self . urlconf_module )
try :
iter ( patterns )
except TypeError :
msg = ( "The included urlconf '{name}' does not appear to have any " "patterns in it. If you see valid patterns in the file then " "the issue is probably caused by a circular import." )
raise ImproperlyConfigured ( msg . format ( name = self . urlconf_name ) )
return patterns
def resolve_error_handler ( self , view_type ) :
callback = getattr ( self . urlconf_module , 'handler%s' % view_type , None )
if not callback :
from django . conf import urls
callback = getattr ( urls , 'handler%s' % view_type )
return get_callable ( callback ) , { }
def reverse ( self , lookup_view , * args , ** kwargs ) :
return self . _reverse_with_prefix ( lookup_view , '' , * args , ** kwargs )
def _reverse_with_prefix ( self , lookup_view , _prefix , * args , ** kwargs ) :
if args and kwargs :
raise ValueError ( "Don't mix *args and **kwargs in call to reverse()!" )
text_args = [ force_text ( v ) for v in args ]
text_kwargs = dict ( ( k , force_text ( v ) ) for ( k , v ) in kwargs . items ( ) )
if not self . _populated :
self . _populate ( )
original_lookup = lookup_view
try :
if lookup_view in self . _callback_strs :
lookup_view = get_callable ( lookup_view , True )
except ( ImportError , AttributeError ) as e :
raise NoReverseMatch ( "Error importing '%s': %s." % ( lookup_view , e ) )
else :
if not callable ( original_lookup ) and callable ( lookup_view ) :
warnings . warn ( 'Reversing by dotted path is deprecated (%s).' % original_lookup , RemovedInDjango20Warning , stacklevel = 3 )
possibilities = self . reverse_dict . getlist ( lookup_view )
prefix_norm , prefix_args = normalize ( urlquote ( _prefix ) ) [ 0 ]
for possibility , pattern , defaults in possibilities :
for result , params in possibility :
if args :
if len ( args ) != len ( params ) + len ( prefix_args ) :
continue
candidate_subs = dict ( zip ( prefix_args + params , text_args ) )
else :
if set ( kwargs . keys ( ) ) | set ( defaults . keys ( ) ) != set ( params ) | set ( defaults . keys ( ) ) | set ( prefix_args ) :
continue
matches = True
for k , v in defaults . items ( ) :
if kwargs . get ( k , v ) != v :
matches = False
break
if not matches :
continue
candidate_subs = text_kwargs
candidate_pat = prefix_norm . replace ( '%' , '%%' ) + result
if re . search ( '^%s%s' % ( prefix_norm , pattern ) , candidate_pat % candidate_subs , re . UNICODE ) :
candidate_subs = dict ( ( k , urlquote ( v ) ) for ( k , v ) in candidate_subs . items ( ) )
return candidate_pat % candidate_subs
m = getattr ( lookup_view , '__module__' , None )
n = getattr ( lookup_view , '__name__' , None )
if m is not None and n is not None :
lookup_view_s = "%s.%s" % ( m , n )
else :
lookup_view_s = lookup_view
patterns = [ pattern for ( possibility , pattern , defaults ) in possibilities ]
raise NoReverseMatch ( "Reverse for '%s' with arguments '%s' and keyword " "arguments '%s' not found. %d pattern(s) tried: %s" % ( lookup_view_s , args , kwargs , len ( patterns ) , patterns ) )
class LocaleRegexURLResolver ( RegexURLResolver ) :
def __init__ ( self , urlconf_name , default_kwargs = None , app_name = None , namespace = None ) :
super ( LocaleRegexURLResolver , self ) . __init__ ( None , urlconf_name , default_kwargs , app_name , namespace )
@ property
def regex ( self ) :
language_code = get_language ( )
if language_code not in self . _regex_dict :
regex_compiled = re . compile ( '^%s/' % language_code , re . UNICODE )
self . _regex_dict [ language_code ] = regex_compiled
return self . _regex_dict [ language_code ]
def resolve ( path , urlconf = None ) :
if urlconf is None :
urlconf = get_urlconf ( )
return get_resolver ( urlconf ) . resolve ( path )
def reverse ( viewname , urlconf = None , args = None , kwargs = None , prefix = None , current_app = None ) :
if urlconf is None :
urlconf = get_urlconf ( )
resolver = get_resolver ( urlconf )
args = args or [ ]
kwargs = kwargs or { }
if prefix is None :
prefix = get_script_prefix ( )
if not isinstance ( viewname , six . string_types ) :
view = viewname
else :
parts = viewname . split ( ':' )
parts . reverse ( )
view = parts [ 0 ]
path = parts [ 1 : ]
resolved_path = [ ]
ns_pattern = ''
while path :
ns = path . pop ( )
try :
app_list = resolver . app_dict [ ns ]
if current_app and current_app in app_list :
ns = current_app
elif ns not in app_list :
ns = app_list [ 0 ]
except KeyError :
pass
try :
extra , resolver = resolver . namespace_dict [ ns ]
resolved_path . append ( ns )
ns_pattern = ns_pattern + extra
except KeyError as key :
if resolved_path :
raise NoReverseMatch ( "%s is not a registered namespace inside '%s'" % ( key , ':' . join ( resolved_path ) ) )
else :
raise NoReverseMatch ( "%s is not a registered namespace" % key )
if ns_pattern :
resolver = get_ns_resolver ( ns_pattern , resolver )
return iri_to_uri ( resolver . _reverse_with_prefix ( view , prefix , * args , ** kwargs ) )
reverse_lazy = lazy ( reverse , str )
def clear_url_caches ( ) :
get_callable . cache_clear ( )
get_resolver . cache_clear ( )
get_ns_resolver . cache_clear ( )
def set_script_prefix ( prefix ) :
if not prefix . endswith ( '/' ) :
prefix += '/'
_prefixes . value = prefix
def get_script_prefix ( ) :
return getattr ( _prefixes , "value" , '/' )
def clear_script_prefix ( ) :
try :
del _prefixes . value
except AttributeError :
pass
def set_urlconf ( urlconf_name ) :
if urlconf_name :
_urlconfs . value = urlconf_name
else :
if hasattr ( _urlconfs , "value" ) :
del _urlconfs . value
def get_urlconf ( default = None ) :
return getattr ( _urlconfs , "value" , default )
def is_valid_path ( path , urlconf = None ) :
try :
resolve ( path , urlconf )
return True
except Resolver404 :
return False
from __future__ import unicode_literals
import re
from django . core . exceptions import ValidationError
from django . utils . deconstruct import deconstructible
from django . utils . translation import ugettext_lazy as _ , ungettext_lazy
from django . utils . encoding import force_text
from django . utils . ipv6 import is_valid_ipv6_address
from django . utils import six
from django . utils . six . moves . urllib . parse import urlsplit , urlunsplit
EMPTY_VALUES = ( None , '' , [ ] , ( ) , { } )
@ deconstructible
class RegexValidator ( object ) :
regex = ''
message = _ ( 'Enter a valid value.' )
code = 'invalid'
inverse_match = False
flags = 0
def __init__ ( self , regex = None , message = None , code = None , inverse_match = None , flags = None ) :
if regex is not None :
self . regex = regex
if message is not None :
self . message = message
if code is not None :
self . code = code
if inverse_match is not None :
self . inverse_match = inverse_match
if flags is not None :
self . flags = flags
if self . flags and not isinstance ( self . regex , six . string_types ) :
raise TypeError ( "If the flags are set, regex must be a regular expression string." )
if isinstance ( self . regex , six . string_types ) :
self . regex = re . compile ( self . regex , self . flags )
def __call__ ( self , value ) :
if not ( self . inverse_match is not bool ( self . regex . search ( force_text ( value ) ) ) ) :
raise ValidationError ( self . message , code = self . code )
def __eq__ ( self , other ) :
return ( isinstance ( other , RegexValidator ) and self . regex . pattern == other . regex . pattern and self . regex . flags == other . regex . flags and ( self . message == other . message ) and ( self . code == other . code ) and ( self . inverse_match == other . inverse_match ) )
def __ne__ ( self , other ) :
return not ( self == other )
@ deconstructible
class URLValidator ( RegexValidator ) :
regex = re . compile ( r'^(?:[a-z0-9\.\-]*)://' r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}(? b
message = _ ( 'Ensure this value is less than or equal to %(limit_value)s.' )
code = 'max_value'
@ deconstructible
class MinValueValidator ( BaseValidator ) :
compare = lambda self , a , b : a < b
message = _ ( 'Ensure this value is greater than or equal to %(limit_value)s.' )
code = 'min_value'
@ deconstructible
class MinLengthValidator ( BaseValidator ) :
compare = lambda self , a , b : a < b
clean = lambda self , x : len ( x )
message = ungettext_lazy ( 'Ensure this value has at least %(limit_value)d character (it has %(show_value)d).' , 'Ensure this value has at least %(limit_value)d characters (it has %(show_value)d).' , 'limit_value' )
code = 'min_length'
@ deconstructible
class MaxLengthValidator ( BaseValidator ) :
compare = lambda self , a , b : a > b
clean = lambda self , x : len ( x )
message = ungettext_lazy ( 'Ensure this value has at most %(limit_value)d character (it has %(show_value)d).' , 'Ensure this value has at most %(limit_value)d characters (it has %(show_value)d).' , 'limit_value' )
code = 'max_length'
import django
from django . core . handlers . wsgi import WSGIHandler
def get_wsgi_application ( ) :
django . setup ( )
return WSGIHandler ( )
from django . core . exceptions import ValidationError
from django . forms . fields import *
from django . forms . forms import *
from django . forms . formsets import *
from django . forms . models import *
from django . forms . widgets import *
from django . forms . extras . widgets import SelectDateWidget
__all__ = [ 'SelectDateWidget' ]
from __future__ import unicode_literals
import datetime
import re
from django . forms . widgets import Widget , Select
from django . utils import datetime_safe
from django . utils . dates import MONTHS
from django . utils . encoding import force_str
from django . utils . safestring import mark_safe
from django . utils . formats import get_format
from django . utils import six
from django . conf import settings
__all__ = ( 'SelectDateWidget' , )
RE_DATE = re . compile ( r'(\d{4})-(\d\d?)-(\d\d?)$' )
def _parse_date_fmt ( ) :
fmt = get_format ( 'DATE_FORMAT' )
escaped = False
for char in fmt :
if escaped :
escaped = False
elif char == '\\' :
escaped = True
elif char in 'Yy' :
yield 'year'
elif char in 'bEFMmNn' :
yield 'month'
elif char in 'dj' :
yield 'day'
class SelectDateWidget ( Widget ) :
none_value = ( 0 , '---' )
month_field = '%s_month'
day_field = '%s_day'
year_field = '%s_year'
def __init__ ( self , attrs = None , years = None , months = None , empty_label = None ) :
self . attrs = attrs or { }
if years :
self . years = years
else :
this_year = datetime . date . today ( ) . year
self . years = range ( this_year , this_year + 10 )
if months :
self . months = months
else :
self . months = MONTHS
if isinstance ( empty_label , ( list , tuple ) ) :
if not len ( empty_label ) == 3 :
raise ValueError ( 'empty_label list/tuple must have 3 elements.' )
self . year_none_value = ( 0 , empty_label [ 0 ] )
self . month_none_value = ( 0 , empty_label [ 1 ] )
self . day_none_value = ( 0 , empty_label [ 2 ] )
else :
if empty_label is not None :
self . none_value = ( 0 , empty_label )
self . year_none_value = self . none_value
self . month_none_value = self . none_value
self . day_none_value = self . none_value
def render ( self , name , value , attrs = None ) :
try :
year_val , month_val , day_val = value . year , value . month , value . day
except AttributeError :
year_val = month_val = day_val = None
if isinstance ( value , six . string_types ) :
if settings . USE_L10N :
try :
input_format = get_format ( 'DATE_INPUT_FORMATS' ) [ 0 ]
v = datetime . datetime . strptime ( force_str ( value ) , input_format )
year_val , month_val , day_val = v . year , v . month , v . day
except ValueError :
pass
else :
match = RE_DATE . match ( value )
if match :
year_val , month_val , day_val = [ int ( v ) for v in match . groups ( ) ]
html = { }
choices = [ ( i , i ) for i in self . years ]
html [ 'year' ] = self . create_select ( name , self . year_field , value , year_val , choices , self . year_none_value )
choices = list ( six . iteritems ( self . months ) )
html [ 'month' ] = self . create_select ( name , self . month_field , value , month_val , choices , self . month_none_value )
choices = [ ( i , i ) for i in range ( 1 , 32 ) ]
html [ 'day' ] = self . create_select ( name , self . day_field , value , day_val , choices , self . day_none_value )
output = [ ]
for field in _parse_date_fmt ( ) :
output . append ( html [ field ] )
return mark_safe ( '\n' . join ( output ) )
def id_for_label ( self , id_ ) :
for first_select in _parse_date_fmt ( ) :
return '%s_%s' % ( id_ , first_select )
else :
return '%s_month' % id_
def value_from_datadict ( self , data , files , name ) :
y = data . get ( self . year_field % name )
m = data . get ( self . month_field % name )
d = data . get ( self . day_field % name )
if y == m == d == "0" :
return None
if y and m and d :
if settings . USE_L10N :
input_format = get_format ( 'DATE_INPUT_FORMATS' ) [ 0 ]
try :
date_value = datetime . date ( int ( y ) , int ( m ) , int ( d ) )
except ValueError :
return '%s-%s-%s' % ( y , m , d )
else :
date_value = datetime_safe . new_date ( date_value )
return date_value . strftime ( input_format )
else :
return '%s-%s-%s' % ( y , m , d )
return data . get ( name , None )
def create_select ( self , name , field , value , val , choices , none_value ) :
if 'id' in self . attrs :
id_ = self . attrs [ 'id' ]
else :
id_ = 'id_%s' % name
if not self . is_required :
choices . insert ( 0 , none_value )
local_attrs = self . build_attrs ( id = field % id_ )
s = Select ( choices = choices )
select_html = s . render ( field % name , val , local_attrs )
return select_html
from __future__ import unicode_literals
import copy
import datetime
import os
import re
import sys
import warnings
from decimal import Decimal , DecimalException
from io import BytesIO
from django . core import validators
from django . core . exceptions import ValidationError
from django . forms . utils import from_current_timezone , to_current_timezone
from django . forms . widgets import ( TextInput , NumberInput , EmailInput , URLInput , HiddenInput , MultipleHiddenInput , ClearableFileInput , CheckboxInput , Select , NullBooleanSelect , SelectMultiple , DateInput , DateTimeInput , TimeInput , SplitDateTimeWidget , SplitHiddenDateTimeWidget , FILE_INPUT_CONTRADICTION )
from django . utils import formats
from django . utils . encoding import smart_text , force_str , force_text
from django . utils . ipv6 import clean_ipv6_address
from django . utils . deprecation import RemovedInDjango19Warning
from django . utils import six
from django . utils . six . moves . urllib . parse import urlsplit , urlunsplit
from django . utils . translation import ugettext_lazy as _ , ungettext_lazy
from django . core . validators import EMPTY_VALUES
__all__ = ( 'Field' , 'CharField' , 'IntegerField' , 'DateField' , 'TimeField' , 'DateTimeField' , 'RegexField' , 'EmailField' , 'FileField' , 'ImageField' , 'URLField' , 'BooleanField' , 'NullBooleanField' , 'ChoiceField' , 'MultipleChoiceField' , 'ComboField' , 'MultiValueField' , 'FloatField' , 'DecimalField' , 'SplitDateTimeField' , 'IPAddressField' , 'GenericIPAddressField' , 'FilePathField' , 'SlugField' , 'TypedChoiceField' , 'TypedMultipleChoiceField' )
class Field ( object ) :
widget = TextInput
hidden_widget = HiddenInput
default_validators = [ ]
default_error_messages = { 'required' : _ ( 'This field is required.' ) , }
empty_values = list ( validators . EMPTY_VALUES )
creation_counter = 0
def __init__ ( self , required = True , widget = None , label = None , initial = None , help_text = '' , error_messages = None , show_hidden_initial = False , validators = [ ] , localize = False , label_suffix = None ) :
self . required , self . label , self . initial = required , label , initial
self . show_hidden_initial = show_hidden_initial
self . help_text = help_text
self . label_suffix = label_suffix
widget = widget or self . widget
if isinstance ( widget , type ) :
widget = widget ( )
self . localize = localize
if self . localize :
widget . is_localized = True
widget . is_required = self . required
extra_attrs = self . widget_attrs ( widget )
if extra_attrs :
widget . attrs . update ( extra_attrs )
self . widget = widget
self . creation_counter = Field . creation_counter
Field . creation_counter += 1
messages = { }
for c in reversed ( self . __class__ . __mro__ ) :
messages . update ( getattr ( c , 'default_error_messages' , { } ) )
messages . update ( error_messages or { } )
self . error_messages = messages
self . validators = self . default_validators + validators
super ( Field , self ) . __init__ ( )
def prepare_value ( self , value ) :
return value
def to_python ( self , value ) :
return value
def validate ( self , value ) :
if value in self . empty_values and self . required :
raise ValidationError ( self . error_messages [ 'required' ] , code = 'required' )
def run_validators ( self , value ) :
if value in self . empty_values :
return
errors = [ ]
for v in self . validators :
try :
v ( value )
except ValidationError as e :
if hasattr ( e , 'code' ) and e . code in self . error_messages :
e . message = self . error_messages [ e . code ]
errors . extend ( e . error_list )
if errors :
raise ValidationError ( errors )
def clean ( self , value ) :
value = self . to_python ( value )
self . validate ( value )
self . run_validators ( value )
return value
def bound_data ( self , data , initial ) :
return data
def widget_attrs ( self , widget ) :
return { }
def get_limit_choices_to ( self ) :
if callable ( self . limit_choices_to ) :
return self . limit_choices_to ( )
return self . limit_choices_to
def _has_changed ( self , initial , data ) :
initial_value = initial if initial is not None else ''
try :
data = self . to_python ( data )
if hasattr ( self , '_coerce' ) :
data = self . _coerce ( data )
except ValidationError :
return True
data_value = data if data is not None else ''
return initial_value != data_value
def __deepcopy__ ( self , memo ) :
result = copy . copy ( self )
memo [ id ( self ) ] = result
result . widget = copy . deepcopy ( self . widget , memo )
result . validators = self . validators [ : ]
return result
class CharField ( Field ) :
def __init__ ( self , max_length = None , min_length = None , * args , ** kwargs ) :
self . max_length , self . min_length = max_length , min_length
super ( CharField , self ) . __init__ ( * args , ** kwargs )
if min_length is not None :
self . validators . append ( validators . MinLengthValidator ( int ( min_length ) ) )
if max_length is not None :
self . validators . append ( validators . MaxLengthValidator ( int ( max_length ) ) )
def to_python ( self , value ) :
if value in self . empty_values :
return ''
return smart_text ( value )
def widget_attrs ( self , widget ) :
attrs = super ( CharField , self ) . widget_attrs ( widget )
if self . max_length is not None :
attrs . update ( { 'maxlength' : str ( self . max_length ) } )
return attrs
class IntegerField ( Field ) :
widget = NumberInput
default_error_messages = { 'invalid' : _ ( 'Enter a whole number.' ) , }
def __init__ ( self , max_value = None , min_value = None , * args , ** kwargs ) :
self . max_value , self . min_value = max_value , min_value
if kwargs . get ( 'localize' ) and self . widget == NumberInput :
kwargs . setdefault ( 'widget' , super ( IntegerField , self ) . widget )
super ( IntegerField , self ) . __init__ ( * args , ** kwargs )
if max_value is not None :
self . validators . append ( validators . MaxValueValidator ( max_value ) )
if min_value is not None :
self . validators . append ( validators . MinValueValidator ( min_value ) )
def to_python ( self , value ) :
value = super ( IntegerField , self ) . to_python ( value )
if value in self . empty_values :
return None
if self . localize :
value = formats . sanitize_separators ( value )
try :
value = int ( str ( value ) )
except ( ValueError , TypeError ) :
raise ValidationError ( self . error_messages [ 'invalid' ] , code = 'invalid' )
return value
def widget_attrs ( self , widget ) :
attrs = super ( IntegerField , self ) . widget_attrs ( widget )
if isinstance ( widget , NumberInput ) :
if self . min_value is not None :
attrs [ 'min' ] = self . min_value
if self . max_value is not None :
attrs [ 'max' ] = self . max_value
return attrs
class FloatField ( IntegerField ) :
default_error_messages = { 'invalid' : _ ( 'Enter a number.' ) , }
def to_python ( self , value ) :
value = super ( IntegerField , self ) . to_python ( value )
if value in self . empty_values :
return None
if self . localize :
value = formats . sanitize_separators ( value )
try :
value = float ( value )
except ( ValueError , TypeError ) :
raise ValidationError ( self . error_messages [ 'invalid' ] , code = 'invalid' )
return value
def validate ( self , value ) :
super ( FloatField , self ) . validate ( value )
if value != value or value in ( Decimal ( 'Inf' ) , Decimal ( '-Inf' ) ) :
raise ValidationError ( self . error_messages [ 'invalid' ] , code = 'invalid' )
return value
def widget_attrs ( self , widget ) :
attrs = super ( FloatField , self ) . widget_attrs ( widget )
if isinstance ( widget , NumberInput ) and 'step' not in widget . attrs :
attrs . setdefault ( 'step' , 'any' )
return attrs
class DecimalField ( IntegerField ) :
default_error_messages = { 'invalid' : _ ( 'Enter a number.' ) , 'max_digits' : ungettext_lazy ( 'Ensure that there are no more than %(max)s digit in total.' , 'Ensure that there are no more than %(max)s digits in total.' , 'max' ) , 'max_decimal_places' : ungettext_lazy ( 'Ensure that there are no more than %(max)s decimal place.' , 'Ensure that there are no more than %(max)s decimal places.' , 'max' ) , 'max_whole_digits' : ungettext_lazy ( 'Ensure that there are no more than %(max)s digit before the decimal point.' , 'Ensure that there are no more than %(max)s digits before the decimal point.' , 'max' ) , }
def __init__ ( self , max_value = None , min_value = None , max_digits = None , decimal_places = None , * args , ** kwargs ) :
self . max_digits , self . decimal_places = max_digits , decimal_places
super ( DecimalField , self ) . __init__ ( max_value , min_value , * args , ** kwargs )
def to_python ( self , value ) :
if value in self . empty_values :
return None
if self . localize :
value = formats . sanitize_separators ( value )
value = smart_text ( value ) . strip ( )
try :
value = Decimal ( value )
except DecimalException :
raise ValidationError ( self . error_messages [ 'invalid' ] , code = 'invalid' )
return value
def validate ( self , value ) :
super ( DecimalField , self ) . validate ( value )
if value in self . empty_values :
return
if value != value or value == Decimal ( "Inf" ) or value == Decimal ( "-Inf" ) :
raise ValidationError ( self . error_messages [ 'invalid' ] , code = 'invalid' )
sign , digittuple , exponent = value . as_tuple ( )
decimals = abs ( exponent )
digits = len ( digittuple )
if decimals > digits :
digits = decimals
whole_digits = digits - decimals
if self . max_digits is not None and digits > self . max_digits :
raise ValidationError ( self . error_messages [ 'max_digits' ] , code = 'max_digits' , params = { 'max' : self . max_digits } , )
if self . decimal_places is not None and decimals > self . decimal_places :
raise ValidationError ( self . error_messages [ 'max_decimal_places' ] , code = 'max_decimal_places' , params = { 'max' : self . decimal_places } , )
if ( self . max_digits is not None and self . decimal_places is not None and whole_digits > ( self . max_digits - self . decimal_places ) ) :
raise ValidationError ( self . error_messages [ 'max_whole_digits' ] , code = 'max_whole_digits' , params = { 'max' : ( self . max_digits - self . decimal_places ) } , )
return value
def widget_attrs ( self , widget ) :
attrs = super ( DecimalField , self ) . widget_attrs ( widget )
if isinstance ( widget , NumberInput ) and 'step' not in widget . attrs :
if self . decimal_places is not None :
step = str ( Decimal ( '1' ) / 10 ** self . decimal_places ) . lower ( )
else :
step = 'any'
attrs . setdefault ( 'step' , step )
return attrs
class BaseTemporalField ( Field ) :
def __init__ ( self , input_formats = None , * args , ** kwargs ) :
super ( BaseTemporalField , self ) . __init__ ( * args , ** kwargs )
if input_formats is not None :
self . input_formats = input_formats
def to_python ( self , value ) :
unicode_value = force_text ( value , strings_only = True )
if isinstance ( unicode_value , six . text_type ) :
value = unicode_value . strip ( )
if isinstance ( value , six . text_type ) :
for format in self . input_formats :
try :
return self . strptime ( value , format )
except ( ValueError , TypeError ) :
continue
raise ValidationError ( self . error_messages [ 'invalid' ] , code = 'invalid' )
def strptime ( self , value , format ) :
raise NotImplementedError ( 'Subclasses must define this method.' )
class DateField ( BaseTemporalField ) :
widget = DateInput
input_formats = formats . get_format_lazy ( 'DATE_INPUT_FORMATS' )
default_error_messages = { 'invalid' : _ ( 'Enter a valid date.' ) , }
def to_python ( self , value ) :
if value in self . empty_values :
return None
if isinstance ( value , datetime . datetime ) :
return value . date ( )
if isinstance ( value , datetime . date ) :
return value
return super ( DateField , self ) . to_python ( value )
def strptime ( self , value , format ) :
return datetime . datetime . strptime ( force_str ( value ) , format ) . date ( )
class TimeField ( BaseTemporalField ) :
widget = TimeInput
input_formats = formats . get_format_lazy ( 'TIME_INPUT_FORMATS' )
default_error_messages = { 'invalid' : _ ( 'Enter a valid time.' ) }
def to_python ( self , value ) :
if value in self . empty_values :
return None
if isinstance ( value , datetime . time ) :
return value
return super ( TimeField , self ) . to_python ( value )
def strptime ( self , value , format ) :
return datetime . datetime . strptime ( force_str ( value ) , format ) . time ( )
class DateTimeField ( BaseTemporalField ) :
widget = DateTimeInput
input_formats = formats . get_format_lazy ( 'DATETIME_INPUT_FORMATS' )
default_error_messages = { 'invalid' : _ ( 'Enter a valid date/time.' ) , }
def prepare_value ( self , value ) :
if isinstance ( value , datetime . datetime ) :
value = to_current_timezone ( value )
return value
def to_python ( self , value ) :
if value in self . empty_values :
return None
if isinstance ( value , datetime . datetime ) :
return from_current_timezone ( value )
if isinstance ( value , datetime . date ) :
result = datetime . datetime ( value . year , value . month , value . day )
return from_current_timezone ( result )
if isinstance ( value , list ) :
warnings . warn ( 'Using SplitDateTimeWidget with DateTimeField is deprecated. ' 'Use SplitDateTimeField instead.' , RemovedInDjango19Warning , stacklevel = 2 )
if len ( value ) != 2 :
raise ValidationError ( self . error_messages [ 'invalid' ] , code = 'invalid' )
if value [ 0 ] in self . empty_values and value [ 1 ] in self . empty_values :
return None
value = '%s %s' % tuple ( value )
result = super ( DateTimeField , self ) . to_python ( value )
return from_current_timezone ( result )
def strptime ( self , value , format ) :
return datetime . datetime . strptime ( force_str ( value ) , format )
class RegexField ( CharField ) :
def __init__ ( self , regex , max_length = None , min_length = None , error_message = None , * args , ** kwargs ) :
if error_message is not None :
error_messages = kwargs . get ( 'error_messages' ) or { }
error_messages [ 'invalid' ] = error_message
kwargs [ 'error_messages' ] = error_messages
super ( RegexField , self ) . __init__ ( max_length , min_length , * args , ** kwargs )
self . _set_regex ( regex )
def _get_regex ( self ) :
return self . _regex
def _set_regex ( self , regex ) :
if isinstance ( regex , six . string_types ) :
regex = re . compile ( regex , re . UNICODE )
self . _regex = regex
if hasattr ( self , '_regex_validator' ) and self . _regex_validator in self . validators :
self . validators . remove ( self . _regex_validator )
self . _regex_validator = validators . RegexValidator ( regex = regex )
self . validators . append ( self . _regex_validator )
regex = property ( _get_regex , _set_regex )
class EmailField ( CharField ) :
widget = EmailInput
default_validators = [ validators . validate_email ]
def clean ( self , value ) :
value = self . to_python ( value ) . strip ( )
return super ( EmailField , self ) . clean ( value )
class FileField ( Field ) :
widget = ClearableFileInput
default_error_messages = { 'invalid' : _ ( "No file was submitted. Check the encoding type on the form." ) , 'missing' : _ ( "No file was submitted." ) , 'empty' : _ ( "The submitted file is empty." ) , 'max_length' : ungettext_lazy ( 'Ensure this filename has at most %(max)d character (it has %(length)d).' , 'Ensure this filename has at most %(max)d characters (it has %(length)d).' , 'max' ) , 'contradiction' : _ ( 'Please either submit a file or check the clear checkbox, not both.' ) }
def __init__ ( self , * args , ** kwargs ) :
self . max_length = kwargs . pop ( 'max_length' , None )
self . allow_empty_file = kwargs . pop ( 'allow_empty_file' , False )
super ( FileField , self ) . __init__ ( * args , ** kwargs )
def to_python ( self , data ) :
if data in self . empty_values :
return None
try :
file_name = data . name
file_size = data . size
except AttributeError :
raise ValidationError ( self . error_messages [ 'invalid' ] , code = 'invalid' )
if self . max_length is not None and len ( file_name ) > self . max_length :
params = { 'max' : self . max_length , 'length' : len ( file_name ) }
raise ValidationError ( self . error_messages [ 'max_length' ] , code = 'max_length' , params = params )
if not file_name :
raise ValidationError ( self . error_messages [ 'invalid' ] , code = 'invalid' )
if not self . allow_empty_file and not file_size :
raise ValidationError ( self . error_messages [ 'empty' ] , code = 'empty' )
return data
def clean ( self , data , initial = None ) :
if data is FILE_INPUT_CONTRADICTION :
raise ValidationError ( self . error_messages [ 'contradiction' ] , code = 'contradiction' )
if data is False :
if not self . required :
return False
data = None
if not data and initial :
return initial
return super ( FileField , self ) . clean ( data )
def bound_data ( self , data , initial ) :
if data in ( None , FILE_INPUT_CONTRADICTION ) :
return initial
return data
def _has_changed ( self , initial , data ) :
if data is None :
return False
return True
class ImageField ( FileField ) :
default_error_messages = { 'invalid_image' : _ ( "Upload a valid image. The file you uploaded was either not an image or a corrupted image." ) , }
def to_python ( self , data ) :
f = super ( ImageField , self ) . to_python ( data )
if f is None :
return None
from PIL import Image
if hasattr ( data , 'temporary_file_path' ) :
file = data . temporary_file_path ( )
else :
if hasattr ( data , 'read' ) :
file = BytesIO ( data . read ( ) )
else :
file = BytesIO ( data [ 'content' ] )
try :
Image . open ( file ) . verify ( )
except Exception :
six . reraise ( ValidationError , ValidationError ( self . error_messages [ 'invalid_image' ] , code = 'invalid_image' , ) , sys . exc_info ( ) [ 2 ] )
if hasattr ( f , 'seek' ) and callable ( f . seek ) :
f . seek ( 0 )
return f
class URLField ( CharField ) :
widget = URLInput
default_error_messages = { 'invalid' : _ ( 'Enter a valid URL.' ) , }
default_validators = [ validators . URLValidator ( ) ]
def to_python ( self , value ) :
def split_url ( url ) :
try :
return list ( urlsplit ( url ) )
except ValueError :
raise ValidationError ( self . error_messages [ 'invalid' ] , code = 'invalid' )
value = super ( URLField , self ) . to_python ( value )
if value :
url_fields = split_url ( value )
if not url_fields [ 0 ] :
url_fields [ 0 ] = 'http'
if not url_fields [ 1 ] :
url_fields [ 1 ] = url_fields [ 2 ]
url_fields [ 2 ] = ''
url_fields = split_url ( urlunsplit ( url_fields ) )
value = urlunsplit ( url_fields )
return value
def clean ( self , value ) :
value = self . to_python ( value ) . strip ( )
return super ( URLField , self ) . clean ( value )
class BooleanField ( Field ) :
widget = CheckboxInput
def to_python ( self , value ) :
if isinstance ( value , six . string_types ) and value . lower ( ) in ( 'false' , '0' ) :
value = False
else :
value = bool ( value )
return super ( BooleanField , self ) . to_python ( value )
def validate ( self , value ) :
if not value and self . required :
raise ValidationError ( self . error_messages [ 'required' ] , code = 'required' )
def _has_changed ( self , initial , data ) :
if initial == 'False' :
initial = False
return bool ( initial ) != bool ( data )
class NullBooleanField ( BooleanField ) :
widget = NullBooleanSelect
def to_python ( self , value ) :
if value in ( True , 'True' , '1' ) :
return True
elif value in ( False , 'False' , '0' ) :
return False
else :
return None
def validate ( self , value ) :
pass
def _has_changed ( self , initial , data ) :
if initial is not None :
initial = bool ( initial )
if data is not None :
data = bool ( data )
return initial != data
class ChoiceField ( Field ) :
widget = Select
default_error_messages = { 'invalid_choice' : _ ( 'Select a valid choice. %(value)s is not one of the available choices.' ) , }
def __init__ ( self , choices = ( ) , required = True , widget = None , label = None , initial = None , help_text = '' , * args , ** kwargs ) :
super ( ChoiceField , self ) . __init__ ( required = required , widget = widget , label = label , initial = initial , help_text = help_text , * args , ** kwargs )
self . choices = choices
def __deepcopy__ ( self , memo ) :
result = super ( ChoiceField , self ) . __deepcopy__ ( memo )
result . _choices = copy . deepcopy ( self . _choices , memo )
return result
def _get_choices ( self ) :
return self . _choices
def _set_choices ( self , value ) :
self . _choices = self . widget . choices = list ( value )
choices = property ( _get_choices , _set_choices )
def to_python ( self , value ) :
if value in self . empty_values :
return ''
return smart_text ( value )
def validate ( self , value ) :
super ( ChoiceField , self ) . validate ( value )
if value and not self . valid_value ( value ) :
raise ValidationError ( self . error_messages [ 'invalid_choice' ] , code = 'invalid_choice' , params = { 'value' : value } , )
def valid_value ( self , value ) :
text_value = force_text ( value )
for k , v in self . choices :
if isinstance ( v , ( list , tuple ) ) :
for k2 , v2 in v :
if value == k2 or text_value == force_text ( k2 ) :
return True
else :
if value == k or text_value == force_text ( k ) :
return True
return False
class TypedChoiceField ( ChoiceField ) :
def __init__ ( self , * args , ** kwargs ) :
self . coerce = kwargs . pop ( 'coerce' , lambda val : val )
self . empty_value = kwargs . pop ( 'empty_value' , '' )
super ( TypedChoiceField , self ) . __init__ ( * args , ** kwargs )
def _coerce ( self , value ) :
if value == self . empty_value or value in self . empty_values :
return self . empty_value
try :
value = self . coerce ( value )
except ( ValueError , TypeError , ValidationError ) :
raise ValidationError ( self . error_messages [ 'invalid_choice' ] , code = 'invalid_choice' , params = { 'value' : value } , )
return value
def clean ( self , value ) :
value = super ( TypedChoiceField , self ) . clean ( value )
return self . _coerce ( value )
class MultipleChoiceField ( ChoiceField ) :
hidden_widget = MultipleHiddenInput
widget = SelectMultiple
default_error_messages = { 'invalid_choice' : _ ( 'Select a valid choice. %(value)s is not one of the available choices.' ) , 'invalid_list' : _ ( 'Enter a list of values.' ) , }
def to_python ( self , value ) :
if not value :
return [ ]
elif not isinstance ( value , ( list , tuple ) ) :
raise ValidationError ( self . error_messages [ 'invalid_list' ] , code = 'invalid_list' )
return [ smart_text ( val ) for val in value ]
def validate ( self , value ) :
if self . required and not value :
raise ValidationError ( self . error_messages [ 'required' ] , code = 'required' )
for val in value :
if not self . valid_value ( val ) :
raise ValidationError ( self . error_messages [ 'invalid_choice' ] , code = 'invalid_choice' , params = { 'value' : val } , )
def _has_changed ( self , initial , data ) :
if initial is None :
initial = [ ]
if data is None :
data = [ ]
if len ( initial ) != len ( data ) :
return True
initial_set = set ( force_text ( value ) for value in initial )
data_set = set ( force_text ( value ) for value in data )
return data_set != initial_set
class TypedMultipleChoiceField ( MultipleChoiceField ) :
def __init__ ( self , * args , ** kwargs ) :
self . coerce = kwargs . pop ( 'coerce' , lambda val : val )
self . empty_value = kwargs . pop ( 'empty_value' , [ ] )
super ( TypedMultipleChoiceField , self ) . __init__ ( * args , ** kwargs )
def _coerce ( self , value ) :
if value == self . empty_value or value in self . empty_values :
return self . empty_value
new_value = [ ]
for choice in value :
try :
new_value . append ( self . coerce ( choice ) )
except ( ValueError , TypeError , ValidationError ) :
raise ValidationError ( self . error_messages [ 'invalid_choice' ] , code = 'invalid_choice' , params = { 'value' : choice } , )
return new_value
def clean ( self , value ) :
value = super ( TypedMultipleChoiceField , self ) . clean ( value )
return self . _coerce ( value )
def validate ( self , value ) :
if value != self . empty_value :
super ( TypedMultipleChoiceField , self ) . validate ( value )
elif self . required :
raise ValidationError ( self . error_messages [ 'required' ] , code = 'required' )
class ComboField ( Field ) :
def __init__ ( self , fields = ( ) , * args , ** kwargs ) :
super ( ComboField , self ) . __init__ ( * args , ** kwargs )
for f in fields :
f . required = False
self . fields = fields
def clean ( self , value ) :
super ( ComboField , self ) . clean ( value )
for field in self . fields :
value = field . clean ( value )
return value
class MultiValueField ( Field ) :
default_error_messages = { 'invalid' : _ ( 'Enter a list of values.' ) , 'incomplete' : _ ( 'Enter a complete value.' ) , }
def __init__ ( self , fields = ( ) , * args , ** kwargs ) :
self . require_all_fields = kwargs . pop ( 'require_all_fields' , True )
super ( MultiValueField , self ) . __init__ ( * args , ** kwargs )
for f in fields :
f . error_messages . setdefault ( 'incomplete' , self . error_messages [ 'incomplete' ] )
if self . require_all_fields :
f . required = False
self . fields = fields
def __deepcopy__ ( self , memo ) :
result = super ( MultiValueField , self ) . __deepcopy__ ( memo )
result . fields = tuple ( [ x . __deepcopy__ ( memo ) for x in self . fields ] )
return result
def validate ( self , value ) :
pass
def clean ( self , value ) :
clean_data = [ ]
errors = [ ]
if not value or isinstance ( value , ( list , tuple ) ) :
if not value or not [ v for v in value if v not in self . empty_values ] :
if self . required :
raise ValidationError ( self . error_messages [ 'required' ] , code = 'required' )
else :
return self . compress ( [ ] )
else :
raise ValidationError ( self . error_messages [ 'invalid' ] , code = 'invalid' )
for i , field in enumerate ( self . fields ) :
try :
field_value = value [ i ]
except IndexError :
field_value = None
if field_value in self . empty_values :
if self . require_all_fields :
if self . required :
raise ValidationError ( self . error_messages [ 'required' ] , code = 'required' )
elif field . required :
if field . error_messages [ 'incomplete' ] not in errors :
errors . append ( field . error_messages [ 'incomplete' ] )
continue
try :
clean_data . append ( field . clean ( field_value ) )
except ValidationError as e :
errors . extend ( m for m in e . error_list if m not in errors )
if errors :
raise ValidationError ( errors )
out = self . compress ( clean_data )
self . validate ( out )
self . run_validators ( out )
return out
def compress ( self , data_list ) :
raise NotImplementedError ( 'Subclasses must implement this method.' )
def _has_changed ( self , initial , data ) :
if initial is None :
initial = [ '' for x in range ( 0 , len ( data ) ) ]
else :
if not isinstance ( initial , list ) :
initial = self . widget . decompress ( initial )
for field , initial , data in zip ( self . fields , initial , data ) :
if field . _has_changed ( field . to_python ( initial ) , data ) :
return True
return False
class FilePathField ( ChoiceField ) :
def __init__ ( self , path , match = None , recursive = False , allow_files = True , allow_folders = False , required = True , widget = None , label = None , initial = None , help_text = '' , * args , ** kwargs ) :
self . path , self . match , self . recursive = path , match , recursive
self . allow_files , self . allow_folders = allow_files , allow_folders
super ( FilePathField , self ) . __init__ ( choices = ( ) , required = required , widget = widget , label = label , initial = initial , help_text = help_text , * args , ** kwargs )
if self . required :
self . choices = [ ]
else :
self . choices = [ ( "" , "---------" ) ]
if self . match is not None :
self . match_re = re . compile ( self . match )
if recursive :
for root , dirs , files in sorted ( os . walk ( self . path ) ) :
if self . allow_files :
for f in files :
if self . match is None or self . match_re . search ( f ) :
f = os . path . join ( root , f )
self . choices . append ( ( f , f . replace ( path , "" , 1 ) ) )
if self . allow_folders :
for f in dirs :
if f == '__pycache__' :
continue
if self . match is None or self . match_re . search ( f ) :
f = os . path . join ( root , f )
self . choices . append ( ( f , f . replace ( path , "" , 1 ) ) )
else :
try :
for f in sorted ( os . listdir ( self . path ) ) :
if f == '__pycache__' :
continue
full_file = os . path . join ( self . path , f )
if ( ( ( self . allow_files and os . path . isfile ( full_file ) ) or ( self . allow_folders and os . path . isdir ( full_file ) ) ) and ( self . match is None or self . match_re . search ( f ) ) ) :
self . choices . append ( ( full_file , f ) )
except OSError :
pass
self . widget . choices = self . choices
class SplitDateTimeField ( MultiValueField ) :
widget = SplitDateTimeWidget
hidden_widget = SplitHiddenDateTimeWidget
default_error_messages = { 'invalid_date' : _ ( 'Enter a valid date.' ) , 'invalid_time' : _ ( 'Enter a valid time.' ) , }
def __init__ ( self , input_date_formats = None , input_time_formats = None , * args , ** kwargs ) :
errors = self . default_error_messages . copy ( )
if 'error_messages' in kwargs :
errors . update ( kwargs [ 'error_messages' ] )
localize = kwargs . get ( 'localize' , False )
fields = ( DateField ( input_formats = input_date_formats , error_messages = { 'invalid' : errors [ 'invalid_date' ] } , localize = localize ) , TimeField ( input_formats = input_time_formats , error_messages = { 'invalid' : errors [ 'invalid_time' ] } , localize = localize ) , )
super ( SplitDateTimeField , self ) . __init__ ( fields , * args , ** kwargs )
def compress ( self , data_list ) :
if data_list :
if data_list [ 0 ] in self . empty_values :
raise ValidationError ( self . error_messages [ 'invalid_date' ] , code = 'invalid_date' )
if data_list [ 1 ] in self . empty_values :
raise ValidationError ( self . error_messages [ 'invalid_time' ] , code = 'invalid_time' )
result = datetime . datetime . combine ( * data_list )
return from_current_timezone ( result )
return None
class IPAddressField ( CharField ) :
default_validators = [ validators . validate_ipv4_address ]
def __init__ ( self , * args , ** kwargs ) :
warnings . warn ( "IPAddressField has been deprecated. Use GenericIPAddressField instead." , RemovedInDjango19Warning )
super ( IPAddressField , self ) . __init__ ( * args , ** kwargs )
def to_python ( self , value ) :
if value in self . empty_values :
return ''
return value . strip ( )
class GenericIPAddressField ( CharField ) :
def __init__ ( self , protocol = 'both' , unpack_ipv4 = False , * args , ** kwargs ) :
self . unpack_ipv4 = unpack_ipv4
self . default_validators = validators . ip_address_validators ( protocol , unpack_ipv4 ) [ 0 ]
super ( GenericIPAddressField , self ) . __init__ ( * args , ** kwargs )
def to_python ( self , value ) :
if value in self . empty_values :
return ''
value = value . strip ( )
if value and ':' in value :
return clean_ipv6_address ( value , self . unpack_ipv4 )
return value
class SlugField ( CharField ) :
default_validators = [ validators . validate_slug ]
def clean ( self , value ) :
value = self . to_python ( value ) . strip ( )
return super ( SlugField , self ) . clean ( value )
from __future__ import unicode_literals
from collections import OrderedDict
import copy
import datetime
import warnings
from django . core . exceptions import ValidationError , NON_FIELD_ERRORS
from django . forms . fields import Field , FileField
from django . forms . utils import flatatt , ErrorDict , ErrorList
from django . forms . widgets import Media , MediaDefiningClass , TextInput , Textarea
from django . utils . deprecation import RemovedInDjango19Warning
from django . utils . encoding import smart_text , force_text , python_2_unicode_compatible
from django . utils . html import conditional_escape , format_html
from django . utils . safestring import mark_safe
from django . utils . translation import ugettext as _
from django . utils import six
__all__ = ( 'BaseForm' , 'Form' )
def pretty_name ( name ) :
if not name :
return ''
return name . replace ( '_' , ' ' ) . capitalize ( )
def get_declared_fields ( bases , attrs , with_base_fields = True ) :
warnings . warn ( "get_declared_fields is deprecated and will be removed in Django 1.9." , RemovedInDjango19Warning , stacklevel = 2 , )
fields = [ ( field_name , attrs . pop ( field_name ) ) for field_name , obj in list ( six . iteritems ( attrs ) ) if isinstance ( obj , Field ) ]
fields . sort ( key = lambda x : x [ 1 ] . creation_counter )
if with_base_fields :
for base in bases [ : : - 1 ] :
if hasattr ( base , 'base_fields' ) :
fields = list ( six . iteritems ( base . base_fields ) ) + fields
else :
for base in bases [ : : - 1 ] :
if hasattr ( base , 'declared_fields' ) :
fields = list ( six . iteritems ( base . declared_fields ) ) + fields
return OrderedDict ( fields )
class DeclarativeFieldsMetaclass ( MediaDefiningClass ) :
def __new__ ( mcs , name , bases , attrs ) :
current_fields = [ ]
for key , value in list ( attrs . items ( ) ) :
if isinstance ( value , Field ) :
current_fields . append ( ( key , value ) )
attrs . pop ( key )
current_fields . sort ( key = lambda x : x [ 1 ] . creation_counter )
attrs [ 'declared_fields' ] = OrderedDict ( current_fields )
new_class = ( super ( DeclarativeFieldsMetaclass , mcs ) . __new__ ( mcs , name , bases , attrs ) )
declared_fields = OrderedDict ( )
for base in reversed ( new_class . __mro__ ) :
if hasattr ( base , 'declared_fields' ) :
declared_fields . update ( base . declared_fields )
for attr , value in base . __dict__ . items ( ) :
if value is None and attr in declared_fields :
declared_fields . pop ( attr )
new_class . base_fields = declared_fields
new_class . declared_fields = declared_fields
return new_class
@ python_2_unicode_compatible
class BaseForm ( object ) :
def __init__ ( self , data = None , files = None , auto_id = 'id_%s' , prefix = None , initial = None , error_class = ErrorList , label_suffix = None , empty_permitted = False ) :
self . is_bound = data is not None or files is not None
self . data = data or { }
self . files = files or { }
self . auto_id = auto_id
self . prefix = prefix
self . initial = initial or { }
self . error_class = error_class
self . label_suffix = label_suffix if label_suffix is not None else _ ( ':' )
self . empty_permitted = empty_permitted
self . _errors = None
self . _changed_data = None
self . fields = copy . deepcopy ( self . base_fields )
def __str__ ( self ) :
return self . as_table ( )
def __iter__ ( self ) :
for name in self . fields :
yield self [ name ]
def __getitem__ ( self , name ) :
try :
field = self . fields [ name ]
except KeyError :
raise KeyError ( "Key %r not found in '%s'" % ( name , self . __class__ . __name__ ) )
return BoundField ( self , field , name )
@ property
def errors ( self ) :
if self . _errors is None :
self . full_clean ( )
return self . _errors
def is_valid ( self ) :
return self . is_bound and not self . errors
def add_prefix ( self , field_name ) :
return '%s-%s' % ( self . prefix , field_name ) if self . prefix else field_name
def add_initial_prefix ( self , field_name ) :
return 'initial-%s' % self . add_prefix ( field_name )
def _html_output ( self , normal_row , error_row , row_ender , help_text_html , errors_on_separate_row ) :
top_errors = self . non_field_errors ( )
output , hidden_fields = [ ] , [ ]
for name , field in self . fields . items ( ) :
html_class_attr = ''
bf = self [ name ]
bf_errors = self . error_class ( [ conditional_escape ( error ) for error in bf . errors ] )
if bf . is_hidden :
if bf_errors :
top_errors . extend ( [ _ ( '(Hidden field %(name)s) %(error)s' ) % { 'name' : name , 'error' : force_text ( e ) } for e in bf_errors ] )
hidden_fields . append ( six . text_type ( bf ) )
else :
css_classes = bf . css_classes ( )
if css_classes :
html_class_attr = ' class="%s"' % css_classes
if errors_on_separate_row and bf_errors :
output . append ( error_row % force_text ( bf_errors ) )
if bf . label :
label = conditional_escape ( force_text ( bf . label ) )
label = bf . label_tag ( label ) or ''
else :
label = ''
if field . help_text :
help_text = help_text_html % force_text ( field . help_text )
else :
help_text = ''
output . append ( normal_row % { 'errors' : force_text ( bf_errors ) , 'label' : force_text ( label ) , 'field' : six . text_type ( bf ) , 'help_text' : help_text , 'html_class_attr' : html_class_attr , 'field_name' : bf . html_name , } )
if top_errors :
output . insert ( 0 , error_row % force_text ( top_errors ) )
if hidden_fields :
str_hidden = '' . join ( hidden_fields )
if output :
last_row = output [ - 1 ]
if not last_row . endswith ( row_ender ) :
last_row = ( normal_row % { 'errors' : '' , 'label' : '' , 'field' : '' , 'help_text' : '' , 'html_class_attr' : html_class_attr } )
output . append ( last_row )
output [ - 1 ] = last_row [ : - len ( row_ender ) ] + str_hidden + row_ender
else :
output . append ( str_hidden )
return mark_safe ( '\n' . join ( output ) )
def as_table ( self ) :
return self . _html_output ( normal_row = '%(label)s | %(errors)s%(field)s%(help_text)s |
' , error_row = '%s |
' , row_ender = '' , help_text_html = '
%s' , errors_on_separate_row = False )
def as_ul ( self ) :
return self . _html_output ( normal_row = '%(errors)s%(label)s %(field)s%(help_text)s' , error_row = '%s' , row_ender = '' , help_text_html = ' %s' , errors_on_separate_row = False )
def as_p ( self ) :
return self . _html_output ( normal_row = '%(label)s %(field)s%(help_text)s
' , error_row = '%s' , row_ender = '
' , help_text_html = ' %s' , errors_on_separate_row = True )
def non_field_errors ( self ) :
return self . errors . get ( NON_FIELD_ERRORS , self . error_class ( error_class = 'nonfield' ) )
def _raw_value ( self , fieldname ) :
field = self . fields [ fieldname ]
prefix = self . add_prefix ( fieldname )
return field . widget . value_from_datadict ( self . data , self . files , prefix )
def add_error ( self , field , error ) :
if not isinstance ( error , ValidationError ) :
error = ValidationError ( error )
if hasattr ( error , 'error_dict' ) :
if field is not None :
raise TypeError ( "The argument `field` must be `None` when the `error` " "argument contains errors for multiple fields." )
else :
error = error . error_dict
else :
error = { field or NON_FIELD_ERRORS : error . error_list }
for field , error_list in error . items ( ) :
if field not in self . errors :
if field != NON_FIELD_ERRORS and field not in self . fields :
raise ValueError ( "'%s' has no field named '%s'." % ( self . __class__ . __name__ , field ) )
if field == NON_FIELD_ERRORS :
self . _errors [ field ] = self . error_class ( error_class = 'nonfield' )
else :
self . _errors [ field ] = self . error_class ( )
self . _errors [ field ] . extend ( error_list )
if field in self . cleaned_data :
del self . cleaned_data [ field ]
def has_error ( self , field , code = None ) :
if code is None :
return field in self . errors
if field in self . errors :
for error in self . errors . as_data ( ) [ field ] :
if error . code == code :
return True
return False
def full_clean ( self ) :
self . _errors = ErrorDict ( )
if not self . is_bound :
return
self . cleaned_data = { }
if self . empty_permitted and not self . has_changed ( ) :
return
self . _clean_fields ( )
self . _clean_form ( )
self . _post_clean ( )
def _clean_fields ( self ) :
for name , field in self . fields . items ( ) :
value = field . widget . value_from_datadict ( self . data , self . files , self . add_prefix ( name ) )
try :
if isinstance ( field , FileField ) :
initial = self . initial . get ( name , field . initial )
value = field . clean ( value , initial )
else :
value = field . clean ( value )
self . cleaned_data [ name ] = value
if hasattr ( self , 'clean_%s' % name ) :
value = getattr ( self , 'clean_%s' % name ) ( )
self . cleaned_data [ name ] = value
except ValidationError as e :
self . add_error ( name , e )
def _clean_form ( self ) :
try :
cleaned_data = self . clean ( )
except ValidationError as e :
self . add_error ( None , e )
else :
if cleaned_data is not None :
self . cleaned_data = cleaned_data
def _post_clean ( self ) :
pass
def clean ( self ) :
return self . cleaned_data
def has_changed ( self ) :
return bool ( self . changed_data )
@ property
def changed_data ( self ) :
if self . _changed_data is None :
self . _changed_data = [ ]
for name , field in self . fields . items ( ) :
prefixed_name = self . add_prefix ( name )
data_value = field . widget . value_from_datadict ( self . data , self . files , prefixed_name )
if not field . show_hidden_initial :
initial_value = self . initial . get ( name , field . initial )
if callable ( initial_value ) :
initial_value = initial_value ( )
else :
initial_prefixed_name = self . add_initial_prefix ( name )
hidden_widget = field . hidden_widget ( )
try :
initial_value = field . to_python ( hidden_widget . value_from_datadict ( self . data , self . files , initial_prefixed_name ) )
except ValidationError :
self . _changed_data . append ( name )
continue
if field . _has_changed ( initial_value , data_value ) :
self . _changed_data . append ( name )
return self . _changed_data
@ property
def media ( self ) :
media = Media ( )
for field in self . fields . values ( ) :
media = media + field . widget . media
return media
def is_multipart ( self ) :
for field in self . fields . values ( ) :
if field . widget . needs_multipart_form :
return True
return False
def hidden_fields ( self ) :
return [ field for field in self if field . is_hidden ]
def visible_fields ( self ) :
return [ field for field in self if not field . is_hidden ]
class Form ( six . with_metaclass ( DeclarativeFieldsMetaclass , BaseForm ) ) :
@ python_2_unicode_compatible
class BoundField ( object ) :
def __init__ ( self , form , field , name ) :
self . form = form
self . field = field
self . name = name
self . html_name = form . add_prefix ( name )
self . html_initial_name = form . add_initial_prefix ( name )
self . html_initial_id = form . add_initial_prefix ( self . auto_id )
if self . field . label is None :
self . label = pretty_name ( name )
else :
self . label = self . field . label
self . help_text = field . help_text or ''
def __str__ ( self ) :
if self . field . show_hidden_initial :
return self . as_widget ( ) + self . as_hidden ( only_initial = True )
return self . as_widget ( )
def __iter__ ( self ) :
id_ = self . field . widget . attrs . get ( 'id' ) or self . auto_id
attrs = { 'id' : id_ } if id_ else { }
for subwidget in self . field . widget . subwidgets ( self . html_name , self . value ( ) , attrs ) :
yield subwidget
def __len__ ( self ) :
return len ( list ( self . __iter__ ( ) ) )
def __getitem__ ( self , idx ) :
return list ( self . __iter__ ( ) ) [ idx ]
@ property
def errors ( self ) :
return self . form . errors . get ( self . name , self . form . error_class ( ) )
def as_widget ( self , widget = None , attrs = None , only_initial = False ) :
if not widget :
widget = self . field . widget
if self . field . localize :
widget . is_localized = True
attrs = attrs or { }
auto_id = self . auto_id
if auto_id and 'id' not in attrs and 'id' not in widget . attrs :
if not only_initial :
attrs [ 'id' ] = auto_id
else :
attrs [ 'id' ] = self . html_initial_id
if not only_initial :
name = self . html_name
else :
name = self . html_initial_name
return force_text ( widget . render ( name , self . value ( ) , attrs = attrs ) )
def as_text ( self , attrs = None , ** kwargs ) :
return self . as_widget ( TextInput ( ) , attrs , ** kwargs )
def as_textarea ( self , attrs = None , ** kwargs ) :
return self . as_widget ( Textarea ( ) , attrs , ** kwargs )
def as_hidden ( self , attrs = None , ** kwargs ) :
return self . as_widget ( self . field . hidden_widget ( ) , attrs , ** kwargs )
@ property
def data ( self ) :
return self . field . widget . value_from_datadict ( self . form . data , self . form . files , self . html_name )
def value ( self ) :
if not self . form . is_bound :
data = self . form . initial . get ( self . name , self . field . initial )
if callable ( data ) :
data = data ( )
if ( isinstance ( data , ( datetime . datetime , datetime . time ) ) and not getattr ( self . field . widget , 'supports_microseconds' , True ) ) :
data = data . replace ( microsecond = 0 )
else :
data = self . field . bound_data ( self . data , self . form . initial . get ( self . name , self . field . initial ) )
return self . field . prepare_value ( data )
def label_tag ( self , contents = None , attrs = None , label_suffix = None ) :
contents = contents or self . label
if label_suffix is None :
label_suffix = ( self . field . label_suffix if self . field . label_suffix is not None else self . form . label_suffix )
if label_suffix and contents and contents [ - 1 ] not in _ ( ':?.!' ) :
contents = format_html ( '{0}{1}' , contents , label_suffix )
widget = self . field . widget
id_ = widget . attrs . get ( 'id' ) or self . auto_id
if id_ :
id_for_label = widget . id_for_label ( id_ )
if id_for_label :
attrs = dict ( attrs or { } , ** { 'for' : id_for_label } )
if self . field . required and hasattr ( self . form , 'required_css_class' ) :
attrs = attrs or { }
if 'class' in attrs :
attrs [ 'class' ] += ' ' + self . form . required_css_class
else :
attrs [ 'class' ] = self . form . required_css_class
attrs = flatatt ( attrs ) if attrs else ''
contents = format_html ( '' , attrs , contents )
else :
contents = conditional_escape ( contents )
return mark_safe ( contents )
def css_classes ( self , extra_classes = None ) :
if hasattr ( extra_classes , 'split' ) :
extra_classes = extra_classes . split ( )
extra_classes = set ( extra_classes or [ ] )
if self . errors and hasattr ( self . form , 'error_css_class' ) :
extra_classes . add ( self . form . error_css_class )
if self . field . required and hasattr ( self . form , 'required_css_class' ) :
extra_classes . add ( self . form . required_css_class )
return ' ' . join ( extra_classes )
@ property
def is_hidden ( self ) :
return self . field . widget . is_hidden
@ property
def auto_id ( self ) :
auto_id = self . form . auto_id
if auto_id and '%s' in smart_text ( auto_id ) :
return smart_text ( auto_id ) % self . html_name
elif auto_id :
return self . html_name
return ''
@ property
def id_for_label ( self ) :
widget = self . field . widget
id_ = widget . attrs . get ( 'id' ) or self . auto_id
return widget . id_for_label ( id_ )
from __future__ import unicode_literals
from django . core . exceptions import ValidationError
from django . forms import Form
from django . forms . fields import IntegerField , BooleanField
from django . forms . utils import ErrorList
from django . forms . widgets import HiddenInput
from django . utils . encoding import python_2_unicode_compatible
from django . utils . functional import cached_property
from django . utils . safestring import mark_safe
from django . utils import six
from django . utils . six . moves import xrange
from django . utils . translation import ungettext , ugettext as _
__all__ = ( 'BaseFormSet' , 'formset_factory' , 'all_valid' )
TOTAL_FORM_COUNT = 'TOTAL_FORMS'
INITIAL_FORM_COUNT = 'INITIAL_FORMS'
MIN_NUM_FORM_COUNT = 'MIN_NUM_FORMS'
MAX_NUM_FORM_COUNT = 'MAX_NUM_FORMS'
ORDERING_FIELD_NAME = 'ORDER'
DELETION_FIELD_NAME = 'DELETE'
DEFAULT_MIN_NUM = 0
DEFAULT_MAX_NUM = 1000
class ManagementForm ( Form ) :
def __init__ ( self , * args , ** kwargs ) :
self . base_fields [ TOTAL_FORM_COUNT ] = IntegerField ( widget = HiddenInput )
self . base_fields [ INITIAL_FORM_COUNT ] = IntegerField ( widget = HiddenInput )
self . base_fields [ MIN_NUM_FORM_COUNT ] = IntegerField ( required = False , widget = HiddenInput )
self . base_fields [ MAX_NUM_FORM_COUNT ] = IntegerField ( required = False , widget = HiddenInput )
super ( ManagementForm , self ) . __init__ ( * args , ** kwargs )
@ python_2_unicode_compatible
class BaseFormSet ( object ) :
def __init__ ( self , data = None , files = None , auto_id = 'id_%s' , prefix = None , initial = None , error_class = ErrorList ) :
self . is_bound = data is not None or files is not None
self . prefix = prefix or self . get_default_prefix ( )
self . auto_id = auto_id
self . data = data or { }
self . files = files or { }
self . initial = initial
self . error_class = error_class
self . _errors = None
self . _non_form_errors = None
def __str__ ( self ) :
return self . as_table ( )
def __iter__ ( self ) :
return iter ( self . forms )
def __getitem__ ( self , index ) :
return self . forms [ index ]
def __len__ ( self ) :
return len ( self . forms )
def __bool__ ( self ) :
return True
def __nonzero__ ( self ) :
return type ( self ) . __bool__ ( self )
@ property
def management_form ( self ) :
if self . is_bound :
form = ManagementForm ( self . data , auto_id = self . auto_id , prefix = self . prefix )
if not form . is_valid ( ) :
raise ValidationError ( _ ( 'ManagementForm data is missing or has been tampered with' ) , code = 'missing_management_form' , )
else :
form = ManagementForm ( auto_id = self . auto_id , prefix = self . prefix , initial = { TOTAL_FORM_COUNT : self . total_form_count ( ) , INITIAL_FORM_COUNT : self . initial_form_count ( ) , MIN_NUM_FORM_COUNT : self . min_num , MAX_NUM_FORM_COUNT : self . max_num } )
return form
def total_form_count ( self ) :
if self . is_bound :
return min ( self . management_form . cleaned_data [ TOTAL_FORM_COUNT ] , self . absolute_max )
else :
initial_forms = self . initial_form_count ( )
total_forms = max ( initial_forms , self . min_num ) + self . extra
if initial_forms > self . max_num >= 0 :
total_forms = initial_forms
elif total_forms > self . max_num >= 0 :
total_forms = self . max_num
return total_forms
def initial_form_count ( self ) :
if self . is_bound :
return self . management_form . cleaned_data [ INITIAL_FORM_COUNT ]
else :
initial_forms = len ( self . initial ) if self . initial else 0
return initial_forms
@ cached_property
def forms ( self ) :
forms = [ self . _construct_form ( i ) for i in xrange ( self . total_form_count ( ) ) ]
return forms
def _construct_form ( self , i , ** kwargs ) :
defaults = { 'auto_id' : self . auto_id , 'prefix' : self . add_prefix ( i ) , 'error_class' : self . error_class , }
if self . is_bound :
defaults [ 'data' ] = self . data
defaults [ 'files' ] = self . files
if self . initial and 'initial' not in kwargs :
try :
defaults [ 'initial' ] = self . initial [ i ]
except IndexError :
pass
if i >= self . initial_form_count ( ) and i >= self . min_num :
defaults [ 'empty_permitted' ] = True
defaults . update ( kwargs )
form = self . form ( ** defaults )
self . add_fields ( form , i )
return form
@ property
def initial_forms ( self ) :
return self . forms [ : self . initial_form_count ( ) ]
@ property
def extra_forms ( self ) :
return self . forms [ self . initial_form_count ( ) : ]
@ property
def empty_form ( self ) :
form = self . form ( auto_id = self . auto_id , prefix = self . add_prefix ( '__prefix__' ) , empty_permitted = True , )
self . add_fields ( form , None )
return form
@ property
def cleaned_data ( self ) :
if not self . is_valid ( ) :
raise AttributeError ( "'%s' object has no attribute 'cleaned_data'" % self . __class__ . __name__ )
return [ form . cleaned_data for form in self . forms ]
@ property
def deleted_forms ( self ) :
if not self . is_valid ( ) or not self . can_delete :
return [ ]
if not hasattr ( self , '_deleted_form_indexes' ) :
self . _deleted_form_indexes = [ ]
for i in range ( 0 , self . total_form_count ( ) ) :
form = self . forms [ i ]
if i >= self . initial_form_count ( ) and not form . has_changed ( ) :
continue
if self . _should_delete_form ( form ) :
self . _deleted_form_indexes . append ( i )
return [ self . forms [ i ] for i in self . _deleted_form_indexes ]
@ property
def ordered_forms ( self ) :
if not self . is_valid ( ) or not self . can_order :
raise AttributeError ( "'%s' object has no attribute 'ordered_forms'" % self . __class__ . __name__ )
if not hasattr ( self , '_ordering' ) :
self . _ordering = [ ]
for i in range ( 0 , self . total_form_count ( ) ) :
form = self . forms [ i ]
if i >= self . initial_form_count ( ) and not form . has_changed ( ) :
continue
if self . can_delete and self . _should_delete_form ( form ) :
continue
self . _ordering . append ( ( i , form . cleaned_data [ ORDERING_FIELD_NAME ] ) )
def compare_ordering_key ( k ) :
if k [ 1 ] is None :
return ( 1 , 0 )
return ( 0 , k [ 1 ] )
self . _ordering . sort ( key = compare_ordering_key )
return [ self . forms [ i [ 0 ] ] for i in self . _ordering ]
@ classmethod
def get_default_prefix ( cls ) :
return 'form'
def non_form_errors ( self ) :
if self . _non_form_errors is None :
self . full_clean ( )
return self . _non_form_errors
@ property
def errors ( self ) :
if self . _errors is None :
self . full_clean ( )
return self . _errors
def total_error_count ( self ) :
return len ( self . non_form_errors ( ) ) + sum ( len ( form_errors ) for form_errors in self . errors )
def _should_delete_form ( self , form ) :
return form . cleaned_data . get ( DELETION_FIELD_NAME , False )
def is_valid ( self ) :
if not self . is_bound :
return False
forms_valid = True
self . errors
for i in range ( 0 , self . total_form_count ( ) ) :
form = self . forms [ i ]
if self . can_delete :
if self . _should_delete_form ( form ) :
continue
forms_valid &= form . is_valid ( )
return forms_valid and not self . non_form_errors ( )
def full_clean ( self ) :
self . _errors = [ ]
self . _non_form_errors = self . error_class ( )
if not self . is_bound :
return
for i in range ( 0 , self . total_form_count ( ) ) :
form = self . forms [ i ]
self . _errors . append ( form . errors )
try :
if ( self . validate_max and self . total_form_count ( ) - len ( self . deleted_forms ) > self . max_num ) or self . management_form . cleaned_data [ TOTAL_FORM_COUNT ] > self . absolute_max :
raise ValidationError ( ungettext ( "Please submit %d or fewer forms." , "Please submit %d or fewer forms." , self . max_num ) % self . max_num , code = 'too_many_forms' , )
if ( self . validate_min and self . total_form_count ( ) - len ( self . deleted_forms ) < self . min_num ) :
raise ValidationError ( ungettext ( "Please submit %d or more forms." , "Please submit %d or more forms." , self . min_num ) % self . min_num , code = 'too_few_forms' )
self . clean ( )
except ValidationError as e :
self . _non_form_errors = self . error_class ( e . error_list )
def clean ( self ) :
pass
def has_changed ( self ) :
return any ( form . has_changed ( ) for form in self )
def add_fields ( self , form , index ) :
if self . can_order :
if index is not None and index < self . initial_form_count ( ) :
form . fields [ ORDERING_FIELD_NAME ] = IntegerField ( label = _ ( 'Order' ) , initial = index + 1 , required = False )
else :
form . fields [ ORDERING_FIELD_NAME ] = IntegerField ( label = _ ( 'Order' ) , required = False )
if self . can_delete :
form . fields [ DELETION_FIELD_NAME ] = BooleanField ( label = _ ( 'Delete' ) , required = False )
def add_prefix ( self , index ) :
return '%s-%s' % ( self . prefix , index )
def is_multipart ( self ) :
if self . forms :
return self . forms [ 0 ] . is_multipart ( )
else :
return self . empty_form . is_multipart ( )
@ property
def media ( self ) :
if self . forms :
return self . forms [ 0 ] . media
else :
return self . empty_form . media
def as_table ( self ) :
forms = ' ' . join ( form . as_table ( ) for form in self )
return mark_safe ( '\n' . join ( [ six . text_type ( self . management_form ) , forms ] ) )
def as_p ( self ) :
forms = ' ' . join ( form . as_p ( ) for form in self )
return mark_safe ( '\n' . join ( [ six . text_type ( self . management_form ) , forms ] ) )
def as_ul ( self ) :
forms = ' ' . join ( form . as_ul ( ) for form in self )
return mark_safe ( '\n' . join ( [ six . text_type ( self . management_form ) , forms ] ) )
def formset_factory ( form , formset = BaseFormSet , extra = 1 , can_order = False , can_delete = False , max_num = None , validate_max = False , min_num = None , validate_min = False ) :
if min_num is None :
min_num = DEFAULT_MIN_NUM
if max_num is None :
max_num = DEFAULT_MAX_NUM
absolute_max = max_num + DEFAULT_MAX_NUM
attrs = { 'form' : form , 'extra' : extra , 'can_order' : can_order , 'can_delete' : can_delete , 'min_num' : min_num , 'max_num' : max_num , 'absolute_max' : absolute_max , 'validate_min' : validate_min , 'validate_max' : validate_max }
return type ( form . __name__ + str ( 'FormSet' ) , ( formset , ) , attrs )
def all_valid ( formsets ) :
valid = True
for formset in formsets :
if not formset . is_valid ( ) :
valid = False
return valid
from __future__ import unicode_literals
from collections import OrderedDict
import warnings
from django . core . exceptions import ( ImproperlyConfigured , ValidationError , NON_FIELD_ERRORS , FieldError )
from django . forms . fields import Field , ChoiceField
from django . forms . forms import DeclarativeFieldsMetaclass , BaseForm
from django . forms . formsets import BaseFormSet , formset_factory
from django . forms . utils import ErrorList
from django . forms . widgets import ( SelectMultiple , HiddenInput , MultipleHiddenInput )
from django . utils import six
from django . utils . deprecation import RemovedInDjango19Warning
from django . utils . encoding import smart_text , force_text
from django . utils . text import get_text_list , capfirst
from django . utils . translation import ugettext_lazy as _ , ugettext
__all__ = ( 'ModelForm' , 'BaseModelForm' , 'model_to_dict' , 'fields_for_model' , 'save_instance' , 'ModelChoiceField' , 'ModelMultipleChoiceField' , 'ALL_FIELDS' , 'BaseModelFormSet' , 'modelformset_factory' , 'BaseInlineFormSet' , 'inlineformset_factory' , )
ALL_FIELDS = '__all__'
def construct_instance ( form , instance , fields = None , exclude = None ) :
from django . db import models
opts = instance . _meta
cleaned_data = form . cleaned_data
file_field_list = [ ]
for f in opts . fields :
if not f . editable or isinstance ( f , models . AutoField ) or f . name not in cleaned_data :
continue
if fields is not None and f . name not in fields :
continue
if exclude and f . name in exclude :
continue
if isinstance ( f , models . FileField ) :
file_field_list . append ( f )
else :
f . save_form_data ( instance , cleaned_data [ f . name ] )
for f in file_field_list :
f . save_form_data ( instance , cleaned_data [ f . name ] )
return instance
def save_instance ( form , instance , fields = None , fail_message = 'saved' , commit = True , exclude = None , construct = True ) :
if construct :
instance = construct_instance ( form , instance , fields , exclude )
opts = instance . _meta
if form . errors :
raise ValueError ( "The %s could not be %s because the data didn't" " validate." % ( opts . object_name , fail_message ) )
def save_m2m ( ) :
cleaned_data = form . cleaned_data
for f in opts . many_to_many + opts . virtual_fields :
if not hasattr ( f , 'save_form_data' ) :
continue
if fields and f . name not in fields :
continue
if exclude and f . name in exclude :
continue
if f . name in cleaned_data :
f . save_form_data ( instance , cleaned_data [ f . name ] )
if commit :
instance . save ( )
save_m2m ( )
else :
form . save_m2m = save_m2m
return instance
def model_to_dict ( instance , fields = None , exclude = None ) :
from django . db . models . fields . related import ManyToManyField
opts = instance . _meta
data = { }
for f in opts . concrete_fields + opts . virtual_fields + opts . many_to_many :
if not getattr ( f , 'editable' , False ) :
continue
if fields and f . name not in fields :
continue
if exclude and f . name in exclude :
continue
if isinstance ( f , ManyToManyField ) :
if instance . pk is None :
data [ f . name ] = [ ]
else :
qs = f . value_from_object ( instance )
if qs . _result_cache is not None :
data [ f . name ] = [ item . pk for item in qs ]
else :
data [ f . name ] = list ( qs . values_list ( 'pk' , flat = True ) )
else :
data [ f . name ] = f . value_from_object ( instance )
return data
def fields_for_model ( model , fields = None , exclude = None , widgets = None , formfield_callback = None , localized_fields = None , labels = None , help_texts = None , error_messages = None ) :
field_list = [ ]
ignored = [ ]
opts = model . _meta
from django . db . models . fields import Field as ModelField
sortable_virtual_fields = [ f for f in opts . virtual_fields if isinstance ( f , ModelField ) ]
for f in sorted ( opts . concrete_fields + sortable_virtual_fields + opts . many_to_many ) :
if not getattr ( f , 'editable' , False ) :
continue
if fields is not None and f . name not in fields :
continue
if exclude and f . name in exclude :
continue
kwargs = { }
if widgets and f . name in widgets :
kwargs [ 'widget' ] = widgets [ f . name ]
if localized_fields == ALL_FIELDS or ( localized_fields and f . name in localized_fields ) :
kwargs [ 'localize' ] = True
if labels and f . name in labels :
kwargs [ 'label' ] = labels [ f . name ]
if help_texts and f . name in help_texts :
kwargs [ 'help_text' ] = help_texts [ f . name ]
if error_messages and f . name in error_messages :
kwargs [ 'error_messages' ] = error_messages [ f . name ]
if formfield_callback is None :
formfield = f . formfield ( ** kwargs )
elif not callable ( formfield_callback ) :
raise TypeError ( 'formfield_callback must be a function or callable' )
else :
formfield = formfield_callback ( f , ** kwargs )
if formfield :
field_list . append ( ( f . name , formfield ) )
else :
ignored . append ( f . name )
field_dict = OrderedDict ( field_list )
if fields :
field_dict = OrderedDict ( [ ( f , field_dict . get ( f ) ) for f in fields if ( ( not exclude ) or ( exclude and f not in exclude ) ) and ( f not in ignored ) ] )
return field_dict
class ModelFormOptions ( object ) :
def __init__ ( self , options = None ) :
self . model = getattr ( options , 'model' , None )
self . fields = getattr ( options , 'fields' , None )
self . exclude = getattr ( options , 'exclude' , None )
self . widgets = getattr ( options , 'widgets' , None )
self . localized_fields = getattr ( options , 'localized_fields' , None )
self . labels = getattr ( options , 'labels' , None )
self . help_texts = getattr ( options , 'help_texts' , None )
self . error_messages = getattr ( options , 'error_messages' , None )
class ModelFormMetaclass ( DeclarativeFieldsMetaclass ) :
def __new__ ( mcs , name , bases , attrs ) :
formfield_callback = attrs . pop ( 'formfield_callback' , None )
new_class = super ( ModelFormMetaclass , mcs ) . __new__ ( mcs , name , bases , attrs )
if bases == ( BaseModelForm , ) :
return new_class
opts = new_class . _meta = ModelFormOptions ( getattr ( new_class , 'Meta' , None ) )
for opt in [ 'fields' , 'exclude' , 'localized_fields' ] :
value = getattr ( opts , opt )
if isinstance ( value , six . string_types ) and value != ALL_FIELDS :
msg = ( "%(model)s.Meta.%(opt)s cannot be a string. " "Did you mean to type: ('%(value)s',)?" % { 'model' : new_class . __name__ , 'opt' : opt , 'value' : value , } )
raise TypeError ( msg )
if opts . model :
if opts . fields is None and opts . exclude is None :
raise ImproperlyConfigured ( "Creating a ModelForm without either the 'fields' attribute " "or the 'exclude' attribute is prohibited; form %s " "needs updating." % name )
if opts . fields == ALL_FIELDS :
opts . fields = None
fields = fields_for_model ( opts . model , opts . fields , opts . exclude , opts . widgets , formfield_callback , opts . localized_fields , opts . labels , opts . help_texts , opts . error_messages )
none_model_fields = [ k for k , v in six . iteritems ( fields ) if not v ]
missing_fields = ( set ( none_model_fields ) - set ( new_class . declared_fields . keys ( ) ) )
if missing_fields :
message = 'Unknown field(s) (%s) specified for %s'
message = message % ( ', ' . join ( missing_fields ) , opts . model . __name__ )
raise FieldError ( message )
fields . update ( new_class . declared_fields )
else :
fields = new_class . declared_fields
new_class . base_fields = fields
return new_class
class BaseModelForm ( BaseForm ) :
def __init__ ( self , data = None , files = None , auto_id = 'id_%s' , prefix = None , initial = None , error_class = ErrorList , label_suffix = None , empty_permitted = False , instance = None ) :
opts = self . _meta
if opts . model is None :
raise ValueError ( 'ModelForm has no model class specified.' )
if instance is None :
self . instance = opts . model ( )
object_data = { }
else :
self . instance = instance
object_data = model_to_dict ( instance , opts . fields , opts . exclude )
if initial is not None :
object_data . update ( initial )
self . _validate_unique = False
super ( BaseModelForm , self ) . __init__ ( data , files , auto_id , prefix , object_data , error_class , label_suffix , empty_permitted )
for field_name in self . fields :
formfield = self . fields [ field_name ]
if hasattr ( formfield , 'queryset' ) :
limit_choices_to = formfield . limit_choices_to
if limit_choices_to is not None :
if callable ( limit_choices_to ) :
limit_choices_to = limit_choices_to ( )
formfield . queryset = formfield . queryset . complex_filter ( limit_choices_to )
def _get_validation_exclusions ( self ) :
exclude = [ ]
for f in self . instance . _meta . fields :
field = f . name
if field not in self . fields :
exclude . append ( f . name )
elif self . _meta . fields and field not in self . _meta . fields :
exclude . append ( f . name )
elif self . _meta . exclude and field in self . _meta . exclude :
exclude . append ( f . name )
elif field in self . _errors . keys ( ) :
exclude . append ( f . name )
else :
form_field = self . fields [ field ]
field_value = self . cleaned_data . get ( field , None )
if not f . blank and not form_field . required and field_value in form_field . empty_values :
exclude . append ( f . name )
return exclude
def clean ( self ) :
self . _validate_unique = True
return self . cleaned_data
def _update_errors ( self , errors ) :
opts = self . _meta
for field , messages in errors . error_dict . items ( ) :
if ( field == NON_FIELD_ERRORS and opts . error_messages and NON_FIELD_ERRORS in opts . error_messages ) :
error_messages = opts . error_messages [ NON_FIELD_ERRORS ]
elif field in self . fields :
error_messages = self . fields [ field ] . error_messages
else :
continue
for message in messages :
if ( isinstance ( message , ValidationError ) and message . code in error_messages ) :
message . message = error_messages [ message . code ]
self . add_error ( None , errors )
def _post_clean ( self ) :
opts = self . _meta
exclude = self . _get_validation_exclusions ( )
construct_instance_exclude = list ( exclude )
for name , field in self . fields . items ( ) :
if isinstance ( field , InlineForeignKeyField ) :
if self . cleaned_data . get ( name ) is not None and self . cleaned_data [ name ] . _state . adding :
construct_instance_exclude . append ( name )
exclude . append ( name )
self . instance = construct_instance ( self , self . instance , opts . fields , construct_instance_exclude )
try :
self . instance . full_clean ( exclude = exclude , validate_unique = False )
except ValidationError as e :
self . _update_errors ( e )
if self . _validate_unique :
self . validate_unique ( )
def validate_unique ( self ) :
exclude = self . _get_validation_exclusions ( )
try :
self . instance . validate_unique ( exclude = exclude )
except ValidationError as e :
self . _update_errors ( e )
def save ( self , commit = True ) :
if self . instance . pk is None :
fail_message = 'created'
else :
fail_message = 'changed'
return save_instance ( self , self . instance , self . _meta . fields , fail_message , commit , self . _meta . exclude , construct = False )
save . alters_data = True
class ModelForm ( six . with_metaclass ( ModelFormMetaclass , BaseModelForm ) ) :
pass
def modelform_factory ( model , form = ModelForm , fields = None , exclude = None , formfield_callback = None , widgets = None , localized_fields = None , labels = None , help_texts = None , error_messages = None ) :
attrs = { 'model' : model }
if fields is not None :
attrs [ 'fields' ] = fields
if exclude is not None :
attrs [ 'exclude' ] = exclude
if widgets is not None :
attrs [ 'widgets' ] = widgets
if localized_fields is not None :
attrs [ 'localized_fields' ] = localized_fields
if labels is not None :
attrs [ 'labels' ] = labels
if help_texts is not None :
attrs [ 'help_texts' ] = help_texts
if error_messages is not None :
attrs [ 'error_messages' ] = error_messages
parent = ( object , )
if hasattr ( form , 'Meta' ) :
parent = ( form . Meta , object )
Meta = type ( str ( 'Meta' ) , parent , attrs )
class_name = model . __name__ + str ( 'Form' )
form_class_attrs = { 'Meta' : Meta , 'formfield_callback' : formfield_callback }
if ( getattr ( Meta , 'fields' , None ) is None and getattr ( Meta , 'exclude' , None ) is None ) :
raise ImproperlyConfigured ( "Calling modelform_factory without defining 'fields' or " "'exclude' explicitly is prohibited." )
return type ( form ) ( class_name , ( form , ) , form_class_attrs )
class BaseModelFormSet ( BaseFormSet ) :
model = None
def __init__ ( self , data = None , files = None , auto_id = 'id_%s' , prefix = None , queryset = None , ** kwargs ) :
self . queryset = queryset
self . initial_extra = kwargs . pop ( 'initial' , None )
defaults = { 'data' : data , 'files' : files , 'auto_id' : auto_id , 'prefix' : prefix }
defaults . update ( kwargs )
super ( BaseModelFormSet , self ) . __init__ ( ** defaults )
def initial_form_count ( self ) :
if not ( self . data or self . files ) :
return len ( self . get_queryset ( ) )
return super ( BaseModelFormSet , self ) . initial_form_count ( )
def _existing_object ( self , pk ) :
if not hasattr ( self , '_object_dict' ) :
self . _object_dict = dict ( ( o . pk , o ) for o in self . get_queryset ( ) )
return self . _object_dict . get ( pk )
def _get_to_python ( self , field ) :
while field . rel is not None :
field = field . rel . get_related_field ( )
return field . to_python
def _construct_form ( self , i , ** kwargs ) :
if self . is_bound and i < self . initial_form_count ( ) :
pk_key = "%s-%s" % ( self . add_prefix ( i ) , self . model . _meta . pk . name )
pk = self . data [ pk_key ]
pk_field = self . model . _meta . pk
to_python = self . _get_to_python ( pk_field )
pk = to_python ( pk )
kwargs [ 'instance' ] = self . _existing_object ( pk )
if i < self . initial_form_count ( ) and 'instance' not in kwargs :
kwargs [ 'instance' ] = self . get_queryset ( ) [ i ]
if i >= self . initial_form_count ( ) and self . initial_extra :
try :
kwargs [ 'initial' ] = self . initial_extra [ i - self . initial_form_count ( ) ]
except IndexError :
pass
return super ( BaseModelFormSet , self ) . _construct_form ( i , ** kwargs )
def get_queryset ( self ) :
if not hasattr ( self , '_queryset' ) :
if self . queryset is not None :
qs = self . queryset
else :
qs = self . model . _default_manager . get_queryset ( )
if not qs . ordered :
qs = qs . order_by ( self . model . _meta . pk . name )
self . _queryset = qs
return self . _queryset
def save_new ( self , form , commit = True ) :
return form . save ( commit = commit )
def save_existing ( self , form , instance , commit = True ) :
return form . save ( commit = commit )
def save ( self , commit = True ) :
if not commit :
self . saved_forms = [ ]
def save_m2m ( ) :
for form in self . saved_forms :
form . save_m2m ( )
self . save_m2m = save_m2m
return self . save_existing_objects ( commit ) + self . save_new_objects ( commit )
save . alters_data = True
def clean ( self ) :
self . validate_unique ( )
def validate_unique ( self ) :
all_unique_checks = set ( )
all_date_checks = set ( )
forms_to_delete = self . deleted_forms
valid_forms = [ form for form in self . forms if form . is_valid ( ) and form not in forms_to_delete ]
for form in valid_forms :
exclude = form . _get_validation_exclusions ( )
unique_checks , date_checks = form . instance . _get_unique_checks ( exclude = exclude )
all_unique_checks = all_unique_checks . union ( set ( unique_checks ) )
all_date_checks = all_date_checks . union ( set ( date_checks ) )
errors = [ ]
for uclass , unique_check in all_unique_checks :
seen_data = set ( )
for form in valid_forms :
row_data = ( form . cleaned_data [ field ] for field in unique_check if field in form . cleaned_data )
row_data = tuple ( d . _get_pk_val ( ) if hasattr ( d , '_get_pk_val' ) else d for d in row_data )
if row_data and None not in row_data :
if row_data in seen_data :
errors . append ( self . get_unique_error_message ( unique_check ) )
form . _errors [ NON_FIELD_ERRORS ] = self . error_class ( [ self . get_form_error ( ) ] )
for field in unique_check :
if field in form . cleaned_data :
del form . cleaned_data [ field ]
seen_data . add ( row_data )
for date_check in all_date_checks :
seen_data = set ( )
uclass , lookup , field , unique_for = date_check
for form in valid_forms :
if ( form . cleaned_data and form . cleaned_data [ field ] is not None and form . cleaned_data [ unique_for ] is not None ) :
if lookup == 'date' :
date = form . cleaned_data [ unique_for ]
date_data = ( date . year , date . month , date . day )
else :
date_data = ( getattr ( form . cleaned_data [ unique_for ] , lookup ) , )
data = ( form . cleaned_data [ field ] , ) + date_data
if data in seen_data :
errors . append ( self . get_date_error_message ( date_check ) )
form . _errors [ NON_FIELD_ERRORS ] = self . error_class ( [ self . get_form_error ( ) ] )
del form . cleaned_data [ field ]
seen_data . add ( data )
if errors :
raise ValidationError ( errors )
def get_unique_error_message ( self , unique_check ) :
if len ( unique_check ) == 1 :
return ugettext ( "Please correct the duplicate data for %(field)s." ) % { "field" : unique_check [ 0 ] , }
else :
return ugettext ( "Please correct the duplicate data for %(field)s, " "which must be unique." ) % { "field" : get_text_list ( unique_check , six . text_type ( _ ( "and" ) ) ) , }
def get_date_error_message ( self , date_check ) :
return ugettext ( "Please correct the duplicate data for %(field_name)s " "which must be unique for the %(lookup)s in %(date_field)s." ) % { 'field_name' : date_check [ 2 ] , 'date_field' : date_check [ 3 ] , 'lookup' : six . text_type ( date_check [ 1 ] ) , }
def get_form_error ( self ) :
return ugettext ( "Please correct the duplicate values below." )
def save_existing_objects ( self , commit = True ) :
self . changed_objects = [ ]
self . deleted_objects = [ ]
if not self . initial_forms :
return [ ]
saved_instances = [ ]
forms_to_delete = self . deleted_forms
for form in self . initial_forms :
obj = form . instance
if form in forms_to_delete :
if obj . pk is None :
continue
self . deleted_objects . append ( obj )
if commit :
obj . delete ( )
elif form . has_changed ( ) :
self . changed_objects . append ( ( obj , form . changed_data ) )
saved_instances . append ( self . save_existing ( form , obj , commit = commit ) )
if not commit :
self . saved_forms . append ( form )
return saved_instances
def save_new_objects ( self , commit = True ) :
self . new_objects = [ ]
for form in self . extra_forms :
if not form . has_changed ( ) :
continue
if self . can_delete and self . _should_delete_form ( form ) :
continue
self . new_objects . append ( self . save_new ( form , commit = commit ) )
if not commit :
self . saved_forms . append ( form )
return self . new_objects
def add_fields ( self , form , index ) :
from django . db . models import AutoField , OneToOneField , ForeignKey
self . _pk_field = pk = self . model . _meta . pk
def pk_is_not_editable ( pk ) :
return ( ( not pk . editable ) or ( pk . auto_created or isinstance ( pk , AutoField ) ) or ( pk . rel and pk . rel . parent_link and pk_is_not_editable ( pk . rel . to . _meta . pk ) ) )
if pk_is_not_editable ( pk ) or pk . name not in form . fields :
if form . is_bound :
pk_value = form . instance . pk
else :
try :
if index is not None :
pk_value = self . get_queryset ( ) [ index ] . pk
else :
pk_value = None
except IndexError :
pk_value = None
if isinstance ( pk , OneToOneField ) or isinstance ( pk , ForeignKey ) :
qs = pk . rel . to . _default_manager . get_queryset ( )
else :
qs = self . model . _default_manager . get_queryset ( )
qs = qs . using ( form . instance . _state . db )
if form . _meta . widgets :
widget = form . _meta . widgets . get ( self . _pk_field . name , HiddenInput )
else :
widget = HiddenInput
form . fields [ self . _pk_field . name ] = ModelChoiceField ( qs , initial = pk_value , required = False , widget = widget )
super ( BaseModelFormSet , self ) . add_fields ( form , index )
def modelformset_factory ( model , form = ModelForm , formfield_callback = None , formset = BaseModelFormSet , extra = 1 , can_delete = False , can_order = False , max_num = None , fields = None , exclude = None , widgets = None , validate_max = False , localized_fields = None , labels = None , help_texts = None , error_messages = None , min_num = None , validate_min = False ) :
meta = getattr ( form , 'Meta' , None )
if meta is None :
meta = type ( str ( 'Meta' ) , ( object , ) , { } )
if ( getattr ( meta , 'fields' , fields ) is None and getattr ( meta , 'exclude' , exclude ) is None ) :
raise ImproperlyConfigured ( "Calling modelformset_factory without defining 'fields' or " "'exclude' explicitly is prohibited." )
form = modelform_factory ( model , form = form , fields = fields , exclude = exclude , formfield_callback = formfield_callback , widgets = widgets , localized_fields = localized_fields , labels = labels , help_texts = help_texts , error_messages = error_messages )
FormSet = formset_factory ( form , formset , extra = extra , min_num = min_num , max_num = max_num , can_order = can_order , can_delete = can_delete , validate_min = validate_min , validate_max = validate_max )
FormSet . model = model
return FormSet
class BaseInlineFormSet ( BaseModelFormSet ) :
def __init__ ( self , data = None , files = None , instance = None , save_as_new = False , prefix = None , queryset = None , ** kwargs ) :
if instance is None :
self . instance = self . fk . rel . to ( )
else :
self . instance = instance
self . save_as_new = save_as_new
if queryset is None :
queryset = self . model . _default_manager
if self . instance . pk is not None :
qs = queryset . filter ( ** { self . fk . name : self . instance } )
else :
qs = queryset . none ( )
super ( BaseInlineFormSet , self ) . __init__ ( data , files , prefix = prefix , queryset = qs , ** kwargs )
def initial_form_count ( self ) :
if self . save_as_new :
return 0
return super ( BaseInlineFormSet , self ) . initial_form_count ( )
def _construct_form ( self , i , ** kwargs ) :
form = super ( BaseInlineFormSet , self ) . _construct_form ( i , ** kwargs )
if self . save_as_new :
form . data [ form . add_prefix ( self . _pk_field . name ) ] = None
form . data [ form . add_prefix ( self . fk . name ) ] = None
setattr ( form . instance , self . fk . get_attname ( ) , self . instance . pk )
return form
@ classmethod
def get_default_prefix ( cls ) :
from django . db . models . fields . related import RelatedObject
return RelatedObject ( cls . fk . rel . to , cls . model , cls . fk ) . get_accessor_name ( ) . replace ( '+' , '' )
def save_new ( self , form , commit = True ) :
obj = form . save ( commit = False )
pk_value = getattr ( self . instance , self . fk . rel . field_name )
setattr ( obj , self . fk . get_attname ( ) , getattr ( pk_value , 'pk' , pk_value ) )
if commit :
obj . save ( )
if commit and hasattr ( form , 'save_m2m' ) :
form . save_m2m ( )
return obj
def add_fields ( self , form , index ) :
super ( BaseInlineFormSet , self ) . add_fields ( form , index )
if self . _pk_field == self . fk :
name = self . _pk_field . name
kwargs = { 'pk_field' : True }
else :
name = self . fk . name
kwargs = { 'label' : getattr ( form . fields . get ( name ) , 'label' , capfirst ( self . fk . verbose_name ) ) }
if self . fk . rel . field_name != self . fk . rel . to . _meta . pk . name :
kwargs [ 'to_field' ] = self . fk . rel . field_name
form . fields [ name ] = InlineForeignKeyField ( self . instance , ** kwargs )
if form . _meta . fields :
if isinstance ( form . _meta . fields , tuple ) :
form . _meta . fields = list ( form . _meta . fields )
form . _meta . fields . append ( self . fk . name )
def get_unique_error_message ( self , unique_check ) :
unique_check = [ field for field in unique_check if field != self . fk . name ]
return super ( BaseInlineFormSet , self ) . get_unique_error_message ( unique_check )
def _get_foreign_key ( parent_model , model , fk_name = None , can_fail = False ) :
from django . db . models import ForeignKey
opts = model . _meta
if fk_name :
fks_to_parent = [ f for f in opts . fields if f . name == fk_name ]
if len ( fks_to_parent ) == 1 :
fk = fks_to_parent [ 0 ]
if not isinstance ( fk , ForeignKey ) or ( fk . rel . to != parent_model and fk . rel . to not in parent_model . _meta . get_parent_list ( ) ) :
raise ValueError ( "fk_name '%s' is not a ForeignKey to '%s.%'." % ( fk_name , parent_model . _meta . app_label , parent_model . _meta . object_name ) )
elif len ( fks_to_parent ) == 0 :
raise ValueError ( "'%s.%s' has no field named '%s'." % ( model . _meta . app_label , model . _meta . object_name , fk_name ) )
else :
fks_to_parent = [ f for f in opts . fields if isinstance ( f , ForeignKey ) and ( f . rel . to == parent_model or f . rel . to in parent_model . _meta . get_parent_list ( ) ) ]
if len ( fks_to_parent ) == 1 :
fk = fks_to_parent [ 0 ]
elif len ( fks_to_parent ) == 0 :
if can_fail :
return
raise ValueError ( "'%s.%s' has no ForeignKey to '%s.%s'." % ( model . _meta . app_label , model . _meta . object_name , parent_model . _meta . app_label , parent_model . _meta . object_name ) )
else :
raise ValueError ( "'%s.%s' has more than one ForeignKey to '%s.%s'." % ( model . _meta . app_label , model . _meta . object_name , parent_model . _meta . app_label , parent_model . _meta . object_name ) )
return fk
def inlineformset_factory ( parent_model , model , form = ModelForm , formset = BaseInlineFormSet , fk_name = None , fields = None , exclude = None , extra = 3 , can_order = False , can_delete = True , max_num = None , formfield_callback = None , widgets = None , validate_max = False , localized_fields = None , labels = None , help_texts = None , error_messages = None , min_num = None , validate_min = False ) :
fk = _get_foreign_key ( parent_model , model , fk_name = fk_name )
if fk . unique :
max_num = 1
kwargs = { 'form' : form , 'formfield_callback' : formfield_callback , 'formset' : formset , 'extra' : extra , 'can_delete' : can_delete , 'can_order' : can_order , 'fields' : fields , 'exclude' : exclude , 'min_num' : min_num , 'max_num' : max_num , 'widgets' : widgets , 'validate_min' : validate_min , 'validate_max' : validate_max , 'localized_fields' : localized_fields , 'labels' : labels , 'help_texts' : help_texts , 'error_messages' : error_messages , }
FormSet = modelformset_factory ( model , ** kwargs )
FormSet . fk = fk
return FormSet
class InlineForeignKeyField ( Field ) :
widget = HiddenInput
default_error_messages = { 'invalid_choice' : _ ( 'The inline foreign key did not match the parent instance primary key.' ) , }
def __init__ ( self , parent_instance , * args , ** kwargs ) :
self . parent_instance = parent_instance
self . pk_field = kwargs . pop ( "pk_field" , False )
self . to_field = kwargs . pop ( "to_field" , None )
if self . parent_instance is not None :
if self . to_field :
kwargs [ "initial" ] = getattr ( self . parent_instance , self . to_field )
else :
kwargs [ "initial" ] = self . parent_instance . pk
kwargs [ "required" ] = False
super ( InlineForeignKeyField , self ) . __init__ ( * args , ** kwargs )
def clean ( self , value ) :
if value in self . empty_values :
if self . pk_field :
return None
return self . parent_instance
if self . to_field :
orig = getattr ( self . parent_instance , self . to_field )
else :
orig = self . parent_instance . pk
if force_text ( value ) != force_text ( orig ) :
raise ValidationError ( self . error_messages [ 'invalid_choice' ] , code = 'invalid_choice' )
return self . parent_instance
def _has_changed ( self , initial , data ) :
return False
class ModelChoiceIterator ( object ) :
def __init__ ( self , field ) :
self . field = field
self . queryset = field . queryset
def __iter__ ( self ) :
if self . field . empty_label is not None :
yield ( "" , self . field . empty_label )
if self . field . cache_choices :
if self . field . choice_cache is None :
self . field . choice_cache = [ self . choice ( obj ) for obj in self . queryset . all ( ) ]
for choice in self . field . choice_cache :
yield choice
else :
for obj in self . queryset . all ( ) :
yield self . choice ( obj )
def __len__ ( self ) :
return ( len ( self . queryset ) + ( 1 if self . field . empty_label is not None else 0 ) )
def choice ( self , obj ) :
return ( self . field . prepare_value ( obj ) , self . field . label_from_instance ( obj ) )
class ModelChoiceField ( ChoiceField ) :
default_error_messages = { 'invalid_choice' : _ ( 'Select a valid choice. That choice is not one of' ' the available choices.' ) , }
def __init__ ( self , queryset , empty_label = "---------" , cache_choices = None , required = True , widget = None , label = None , initial = None , help_text = '' , to_field_name = None , limit_choices_to = None , * args , ** kwargs ) :
if required and ( initial is not None ) :
self . empty_label = None
else :
self . empty_label = empty_label
if cache_choices is not None :
warnings . warn ( "cache_choices has been deprecated and will be " "removed in Django 1.9." , RemovedInDjango19Warning , stacklevel = 2 )
else :
cache_choices = False
self . cache_choices = cache_choices
Field . __init__ ( self , required , widget , label , initial , help_text , * args , ** kwargs )
self . queryset = queryset
self . limit_choices_to = limit_choices_to
self . choice_cache = None
self . to_field_name = to_field_name
def __deepcopy__ ( self , memo ) :
result = super ( ChoiceField , self ) . __deepcopy__ ( memo )
result . queryset = result . queryset
return result
def _get_queryset ( self ) :
return self . _queryset
def _set_queryset ( self , queryset ) :
self . _queryset = queryset
self . widget . choices = self . choices
queryset = property ( _get_queryset , _set_queryset )
def label_from_instance ( self , obj ) :
return smart_text ( obj )
def _get_choices ( self ) :
if hasattr ( self , '_choices' ) :
return self . _choices
return ModelChoiceIterator ( self )
choices = property ( _get_choices , ChoiceField . _set_choices )
def prepare_value ( self , value ) :
if hasattr ( value , '_meta' ) :
if self . to_field_name :
return value . serializable_value ( self . to_field_name )
else :
return value . pk
return super ( ModelChoiceField , self ) . prepare_value ( value )
def to_python ( self , value ) :
if value in self . empty_values :
return None
try :
key = self . to_field_name or 'pk'
value = self . queryset . get ( ** { key : value } )
except ( ValueError , self . queryset . model . DoesNotExist ) :
raise ValidationError ( self . error_messages [ 'invalid_choice' ] , code = 'invalid_choice' )
return value
def validate ( self , value ) :
return Field . validate ( self , value )
def _has_changed ( self , initial , data ) :
initial_value = initial if initial is not None else ''
data_value = data if data is not None else ''
return force_text ( self . prepare_value ( initial_value ) ) != force_text ( data_value )
class ModelMultipleChoiceField ( ModelChoiceField ) :
widget = SelectMultiple
hidden_widget = MultipleHiddenInput
default_error_messages = { 'list' : _ ( 'Enter a list of values.' ) , 'invalid_choice' : _ ( 'Select a valid choice. %(value)s is not one of the' ' available choices.' ) , 'invalid_pk_value' : _ ( '"%(pk)s" is not a valid value for a primary key.' ) }
def __init__ ( self , queryset , cache_choices = None , required = True , widget = None , label = None , initial = None , help_text = '' , * args , ** kwargs ) :
super ( ModelMultipleChoiceField , self ) . __init__ ( queryset , None , cache_choices , required , widget , label , initial , help_text , * args , ** kwargs )
def to_python ( self , value ) :
if not value :
return [ ]
to_py = super ( ModelMultipleChoiceField , self ) . to_python
return [ to_py ( val ) for val in value ]
def clean ( self , value ) :
if self . required and not value :
raise ValidationError ( self . error_messages [ 'required' ] , code = 'required' )
elif not self . required and not value :
return self . queryset . none ( )
if not isinstance ( value , ( list , tuple ) ) :
raise ValidationError ( self . error_messages [ 'list' ] , code = 'list' )
key = self . to_field_name or 'pk'
for pk in value :
try :
self . queryset . filter ( ** { key : pk } )
except ValueError :
raise ValidationError ( self . error_messages [ 'invalid_pk_value' ] , code = 'invalid_pk_value' , params = { 'pk' : pk } , )
qs = self . queryset . filter ( ** { '%s__in' % key : value } )
pks = set ( force_text ( getattr ( o , key ) ) for o in qs )
for val in value :
if force_text ( val ) not in pks :
raise ValidationError ( self . error_messages [ 'invalid_choice' ] , code = 'invalid_choice' , params = { 'value' : val } , )
self . run_validators ( value )
return qs
def prepare_value ( self , value ) :
if ( hasattr ( value , '__iter__' ) and not isinstance ( value , six . text_type ) and not hasattr ( value , '_meta' ) ) :
return [ super ( ModelMultipleChoiceField , self ) . prepare_value ( v ) for v in value ]
return super ( ModelMultipleChoiceField , self ) . prepare_value ( value )
def _has_changed ( self , initial , data ) :
if initial is None :
initial = [ ]
if data is None :
data = [ ]
if len ( initial ) != len ( data ) :
return True
initial_set = set ( force_text ( value ) for value in self . prepare_value ( initial ) )
data_set = set ( force_text ( value ) for value in data )
return data_set != initial_set
def modelform_defines_fields ( form_class ) :
return ( form_class is not None and ( hasattr ( form_class , '_meta' ) and ( form_class . _meta . fields is not None or form_class . _meta . exclude is not None ) ) )
import warnings
from django . utils . deprecation import RemovedInDjango19Warning
warnings . warn ( "The django.forms.util module has been renamed. " "Use django.forms.utils instead." , RemovedInDjango19Warning , stacklevel = 2 )
from django . forms . utils import *
from __future__ import unicode_literals
import json
import sys
try :
from collections import UserList
except ImportError :
from UserList import UserList
from django . conf import settings
from django . utils . encoding import force_text , python_2_unicode_compatible
from django . utils . html import format_html , format_html_join , escape
from django . utils import timezone
from django . utils . translation import ugettext_lazy as _
from django . utils import six
from django . core . exceptions import ValidationError
def flatatt ( attrs ) :
boolean_attrs = [ ]
for attr , value in list ( attrs . items ( ) ) :
if value is True :
boolean_attrs . append ( ( attr , ) )
del attrs [ attr ]
elif value is False :
del attrs [ attr ]
return ( format_html_join ( '' , ' {0}="{1}"' , sorted ( attrs . items ( ) ) ) + format_html_join ( '' , ' {0}' , sorted ( boolean_attrs ) ) )
@ python_2_unicode_compatible
class ErrorDict ( dict ) :
def as_data ( self ) :
return { f : e . as_data ( ) for f , e in self . items ( ) }
def as_json ( self , escape_html = False ) :
return json . dumps ( { f : e . get_json_data ( escape_html ) for f , e in self . items ( ) } )
def as_ul ( self ) :
if not self :
return ''
return format_html ( '' , format_html_join ( '' , '{0}{1}' , ( ( k , force_text ( v ) ) for k , v in self . items ( ) ) ) )
def as_text ( self ) :
output = [ ]
for field , errors in self . items ( ) :
output . append ( '* %s' % field )
output . append ( '\n' . join ( ' * %s' % e for e in errors ) )
return '\n' . join ( output )
def __str__ ( self ) :
return self . as_ul ( )
@ python_2_unicode_compatible
class ErrorList ( UserList , list ) :
def __init__ ( self , initlist = None , error_class = None ) :
super ( ErrorList , self ) . __init__ ( initlist )
if error_class is None :
self . error_class = 'errorlist'
else :
self . error_class = 'errorlist {}' . format ( error_class )
def as_data ( self ) :
return ValidationError ( self . data ) . error_list
def get_json_data ( self , escape_html = False ) :
errors = [ ]
for error in self . as_data ( ) :
message = list ( error ) [ 0 ]
errors . append ( { 'message' : escape ( message ) if escape_html else message , 'code' : error . code or '' , } )
return errors
def as_json ( self , escape_html = False ) :
return json . dumps ( self . get_json_data ( escape_html ) )
def as_ul ( self ) :
if not self . data :
return ''
return format_html ( '' , self . error_class , format_html_join ( '' , '{0}' , ( ( force_text ( e ) , ) for e in self ) ) )
def as_text ( self ) :
return '\n' . join ( '* %s' % e for e in self )
def __str__ ( self ) :
return self . as_ul ( )
def __repr__ ( self ) :
return repr ( list ( self ) )
def __contains__ ( self , item ) :
return item in list ( self )
def __eq__ ( self , other ) :
return list ( self ) == other
def __ne__ ( self , other ) :
return list ( self ) != other
def __getitem__ ( self , i ) :
error = self . data [ i ]
if isinstance ( error , ValidationError ) :
return list ( error ) [ 0 ]
return force_text ( error )
def from_current_timezone ( value ) :
if settings . USE_TZ and value is not None and timezone . is_naive ( value ) :
current_timezone = timezone . get_current_timezone ( )
try :
return timezone . make_aware ( value , current_timezone )
except Exception :
message = _ ( '%(datetime)s couldn\'t be interpreted ' 'in time zone %(current_timezone)s; it ' 'may be ambiguous or it may not exist.' )
params = { 'datetime' : value , 'current_timezone' : current_timezone }
six . reraise ( ValidationError , ValidationError ( message , code = 'ambiguous_timezone' , params = params , ) , sys . exc_info ( ) [ 2 ] )
return value
def to_current_timezone ( value ) :
if settings . USE_TZ and value is not None and timezone . is_aware ( value ) :
current_timezone = timezone . get_current_timezone ( )
return timezone . make_naive ( value , current_timezone )
return value
from __future__ import unicode_literals
import copy
from itertools import chain
from django . conf import settings
from django . forms . utils import flatatt , to_current_timezone
from django . utils . datastructures import MultiValueDict , MergeDict
from django . utils . encoding import force_text , python_2_unicode_compatible
from django . utils . html import conditional_escape , format_html
from django . utils . translation import ugettext_lazy
from django . utils . safestring import mark_safe
from django . utils import formats , six
from django . utils . six . moves . urllib . parse import urljoin
__all__ = ( 'Media' , 'MediaDefiningClass' , 'Widget' , 'TextInput' , 'EmailInput' , 'URLInput' , 'NumberInput' , 'PasswordInput' , 'HiddenInput' , 'MultipleHiddenInput' , 'ClearableFileInput' , 'FileInput' , 'DateInput' , 'DateTimeInput' , 'TimeInput' , 'Textarea' , 'CheckboxInput' , 'Select' , 'NullBooleanSelect' , 'SelectMultiple' , 'RadioSelect' , 'CheckboxSelectMultiple' , 'MultiWidget' , 'SplitDateTimeWidget' , 'SplitHiddenDateTimeWidget' , )
MEDIA_TYPES = ( 'css' , 'js' )
@ python_2_unicode_compatible
class Media ( object ) :
def __init__ ( self , media = None , ** kwargs ) :
if media :
media_attrs = media . __dict__
else :
media_attrs = kwargs
self . _css = { }
self . _js = [ ]
for name in MEDIA_TYPES :
getattr ( self , 'add_' + name ) ( media_attrs . get ( name , None ) )
def __str__ ( self ) :
return self . render ( )
def render ( self ) :
return mark_safe ( '\n' . join ( chain ( * [ getattr ( self , 'render_' + name ) ( ) for name in MEDIA_TYPES ] ) ) )
def render_js ( self ) :
return [ format_html ( '' , self . absolute_path ( path ) ) for path in self . _js ]
def render_css ( self ) :
media = sorted ( self . _css . keys ( ) )
return chain ( * [ [ format_html ( '' , self . absolute_path ( path ) , medium ) for path in self . _css [ medium ] ] for medium in media ] )
def absolute_path ( self , path , prefix = None ) :
if path . startswith ( ( 'http://' , 'https://' , '/' ) ) :
return path
if prefix is None :
if settings . STATIC_URL is None :
prefix = settings . MEDIA_URL
else :
prefix = settings . STATIC_URL
return urljoin ( prefix , path )
def __getitem__ ( self , name ) :
if name in MEDIA_TYPES :
return Media ( ** { str ( name ) : getattr ( self , '_' + name ) } )
raise KeyError ( 'Unknown media type "%s"' % name )
def add_js ( self , data ) :
if data :
for path in data :
if path not in self . _js :
self . _js . append ( path )
def add_css ( self , data ) :
if data :
for medium , paths in data . items ( ) :
for path in paths :
if not self . _css . get ( medium ) or path not in self . _css [ medium ] :
self . _css . setdefault ( medium , [ ] ) . append ( path )
def __add__ ( self , other ) :
combined = Media ( )
for name in MEDIA_TYPES :
getattr ( combined , 'add_' + name ) ( getattr ( self , '_' + name , None ) )
getattr ( combined , 'add_' + name ) ( getattr ( other , '_' + name , None ) )
return combined
def media_property ( cls ) :
def _media ( self ) :
sup_cls = super ( cls , self )
try :
base = sup_cls . media
except AttributeError :
base = Media ( )
definition = getattr ( cls , 'Media' , None )
if definition :
extend = getattr ( definition , 'extend' , True )
if extend :
if extend is True :
m = base
else :
m = Media ( )
for medium in extend :
m = m + base [ medium ]
return m + Media ( definition )
else :
return Media ( definition )
else :
return base
return property ( _media )
class MediaDefiningClass ( type ) :
def __new__ ( mcs , name , bases , attrs ) :
new_class = ( super ( MediaDefiningClass , mcs ) . __new__ ( mcs , name , bases , attrs ) )
if 'media' not in attrs :
new_class . media = media_property ( new_class )
return new_class
@ python_2_unicode_compatible
class SubWidget ( object ) :
def __init__ ( self , parent_widget , name , value , attrs , choices ) :
self . parent_widget = parent_widget
self . name , self . value = name , value
self . attrs , self . choices = attrs , choices
def __str__ ( self ) :
args = [ self . name , self . value , self . attrs ]
if self . choices :
args . append ( self . choices )
return self . parent_widget . render ( * args )
class Widget ( six . with_metaclass ( MediaDefiningClass ) ) :
needs_multipart_form = False
is_localized = False
is_required = False
def __init__ ( self , attrs = None ) :
if attrs is not None :
self . attrs = attrs . copy ( )
else :
self . attrs = { }
def __deepcopy__ ( self , memo ) :
obj = copy . copy ( self )
obj . attrs = self . attrs . copy ( )
memo [ id ( self ) ] = obj
return obj
@ property
def is_hidden ( self ) :
return self . input_type == 'hidden' if hasattr ( self , 'input_type' ) else False
def subwidgets ( self , name , value , attrs = None , choices = ( ) ) :
yield SubWidget ( self , name , value , attrs , choices )
def render ( self , name , value , attrs = None ) :
raise NotImplementedError ( 'subclasses of Widget must provide a render() method' )
def build_attrs ( self , extra_attrs = None , ** kwargs ) :
attrs = dict ( self . attrs , ** kwargs )
if extra_attrs :
attrs . update ( extra_attrs )
return attrs
def value_from_datadict ( self , data , files , name ) :
return data . get ( name , None )
def id_for_label ( self , id_ ) :
return id_
class Input ( Widget ) :
input_type = None
def _format_value ( self , value ) :
if self . is_localized :
return formats . localize_input ( value )
return value
def render ( self , name , value , attrs = None ) :
if value is None :
value = ''
final_attrs = self . build_attrs ( attrs , type = self . input_type , name = name )
if value != '' :
final_attrs [ 'value' ] = force_text ( self . _format_value ( value ) )
return format_html ( '' , flatatt ( final_attrs ) )
class TextInput ( Input ) :
input_type = 'text'
def __init__ ( self , attrs = None ) :
if attrs is not None :
self . input_type = attrs . pop ( 'type' , self . input_type )
super ( TextInput , self ) . __init__ ( attrs )
class NumberInput ( TextInput ) :
input_type = 'number'
class EmailInput ( TextInput ) :
input_type = 'email'
class URLInput ( TextInput ) :
input_type = 'url'
class PasswordInput ( TextInput ) :
input_type = 'password'
def __init__ ( self , attrs = None , render_value = False ) :
super ( PasswordInput , self ) . __init__ ( attrs )
self . render_value = render_value
def render ( self , name , value , attrs = None ) :
if not self . render_value :
value = None
return super ( PasswordInput , self ) . render ( name , value , attrs )
class HiddenInput ( Input ) :
input_type = 'hidden'
class MultipleHiddenInput ( HiddenInput ) :
def __init__ ( self , attrs = None , choices = ( ) ) :
super ( MultipleHiddenInput , self ) . __init__ ( attrs )
self . choices = choices
def render ( self , name , value , attrs = None , choices = ( ) ) :
if value is None :
value = [ ]
final_attrs = self . build_attrs ( attrs , type = self . input_type , name = name )
id_ = final_attrs . get ( 'id' , None )
inputs = [ ]
for i , v in enumerate ( value ) :
input_attrs = dict ( value = force_text ( v ) , ** final_attrs )
if id_ :
input_attrs [ 'id' ] = '%s_%s' % ( id_ , i )
inputs . append ( format_html ( '' , flatatt ( input_attrs ) ) )
return mark_safe ( '\n' . join ( inputs ) )
def value_from_datadict ( self , data , files , name ) :
if isinstance ( data , ( MultiValueDict , MergeDict ) ) :
return data . getlist ( name )
return data . get ( name , None )
class FileInput ( Input ) :
input_type = 'file'
needs_multipart_form = True
def render ( self , name , value , attrs = None ) :
return super ( FileInput , self ) . render ( name , None , attrs = attrs )
def value_from_datadict ( self , data , files , name ) :
return files . get ( name , None )
FILE_INPUT_CONTRADICTION = object ( )
class ClearableFileInput ( FileInput ) :
initial_text = ugettext_lazy ( 'Currently' )
input_text = ugettext_lazy ( 'Change' )
clear_checkbox_label = ugettext_lazy ( 'Clear' )
template_with_initial = '%(initial_text)s: %(initial)s %(clear_template)s
%(input_text)s: %(input)s'
template_with_clear = '%(clear)s '
url_markup_template = '{1}'
def clear_checkbox_name ( self , name ) :
return name + '-clear'
def clear_checkbox_id ( self , name ) :
return name + '_id'
def render ( self , name , value , attrs = None ) :
substitutions = { 'initial_text' : self . initial_text , 'input_text' : self . input_text , 'clear_template' : '' , 'clear_checkbox_label' : self . clear_checkbox_label , }
template = '%(input)s'
substitutions [ 'input' ] = super ( ClearableFileInput , self ) . render ( name , value , attrs )
if value and hasattr ( value , "url" ) :
template = self . template_with_initial
substitutions [ 'initial' ] = format_html ( self . url_markup_template , value . url , force_text ( value ) )
if not self . is_required :
checkbox_name = self . clear_checkbox_name ( name )
checkbox_id = self . clear_checkbox_id ( checkbox_name )
substitutions [ 'clear_checkbox_name' ] = conditional_escape ( checkbox_name )
substitutions [ 'clear_checkbox_id' ] = conditional_escape ( checkbox_id )
substitutions [ 'clear' ] = CheckboxInput ( ) . render ( checkbox_name , False , attrs = { 'id' : checkbox_id } )
substitutions [ 'clear_template' ] = self . template_with_clear % substitutions
return mark_safe ( template % substitutions )
def value_from_datadict ( self , data , files , name ) :
upload = super ( ClearableFileInput , self ) . value_from_datadict ( data , files , name )
if not self . is_required and CheckboxInput ( ) . value_from_datadict ( data , files , self . clear_checkbox_name ( name ) ) :
if upload :
return FILE_INPUT_CONTRADICTION
return False
return upload
class Textarea ( Widget ) :
def __init__ ( self , attrs = None ) :
default_attrs = { 'cols' : '40' , 'rows' : '10' }
if attrs :
default_attrs . update ( attrs )
super ( Textarea , self ) . __init__ ( default_attrs )
def render ( self , name , value , attrs = None ) :
if value is None :
value = ''
final_attrs = self . build_attrs ( attrs , name = name )
return format_html ( '' , flatatt ( final_attrs ) , force_text ( value ) )
class DateTimeBaseInput ( TextInput ) :
format_key = ''
supports_microseconds = False
def __init__ ( self , attrs = None , format = None ) :
super ( DateTimeBaseInput , self ) . __init__ ( attrs )
self . format = format if format else None
def _format_value ( self , value ) :
return formats . localize_input ( value , self . format or formats . get_format ( self . format_key ) [ 0 ] )
class DateInput ( DateTimeBaseInput ) :
format_key = 'DATE_INPUT_FORMATS'
class DateTimeInput ( DateTimeBaseInput ) :
format_key = 'DATETIME_INPUT_FORMATS'
class TimeInput ( DateTimeBaseInput ) :
format_key = 'TIME_INPUT_FORMATS'
def boolean_check ( v ) :
return not ( v is False or v is None or v == '' )
class CheckboxInput ( Widget ) :
def __init__ ( self , attrs = None , check_test = None ) :
super ( CheckboxInput , self ) . __init__ ( attrs )
self . check_test = boolean_check if check_test is None else check_test
def render ( self , name , value , attrs = None ) :
final_attrs = self . build_attrs ( attrs , type = 'checkbox' , name = name )
if self . check_test ( value ) :
final_attrs [ 'checked' ] = 'checked'
if not ( value is True or value is False or value is None or value == '' ) :
final_attrs [ 'value' ] = force_text ( value )
return format_html ( '' , flatatt ( final_attrs ) )
def value_from_datadict ( self , data , files , name ) :
if name not in data :
return False
value = data . get ( name )
values = { 'true' : True , 'false' : False }
if isinstance ( value , six . string_types ) :
value = values . get ( value . lower ( ) , value )
return bool ( value )
class Select ( Widget ) :
allow_multiple_selected = False
def __init__ ( self , attrs = None , choices = ( ) ) :
super ( Select , self ) . __init__ ( attrs )
self . choices = list ( choices )
def render ( self , name , value , attrs = None , choices = ( ) ) :
if value is None :
value = ''
final_attrs = self . build_attrs ( attrs , name = name )
output = [ format_html ( '' )
return mark_safe ( '\n' . join ( output ) )
def render_option ( self , selected_choices , option_value , option_label ) :
if option_value is None :
option_value = ''
option_value = force_text ( option_value )
if option_value in selected_choices :
selected_html = mark_safe ( ' selected="selected"' )
if not self . allow_multiple_selected :
selected_choices . remove ( option_value )
else :
selected_html = ''
return format_html ( '' , option_value , selected_html , force_text ( option_label ) )
def render_options ( self , choices , selected_choices ) :
selected_choices = set ( force_text ( v ) for v in selected_choices )
output = [ ]
for option_value , option_label in chain ( self . choices , choices ) :
if isinstance ( option_label , ( list , tuple ) ) :
output . append ( format_html ( '' )
else :
output . append ( self . render_option ( selected_choices , option_value , option_label ) )
return '\n' . join ( output )
class NullBooleanSelect ( Select ) :
def __init__ ( self , attrs = None ) :
choices = ( ( '1' , ugettext_lazy ( 'Unknown' ) ) , ( '2' , ugettext_lazy ( 'Yes' ) ) , ( '3' , ugettext_lazy ( 'No' ) ) )
super ( NullBooleanSelect , self ) . __init__ ( attrs , choices )
def render ( self , name , value , attrs = None , choices = ( ) ) :
try :
value = { True : '2' , False : '3' , '2' : '2' , '3' : '3' } [ value ]
except KeyError :
value = '1'
return super ( NullBooleanSelect , self ) . render ( name , value , attrs , choices )
def value_from_datadict ( self , data , files , name ) :
value = data . get ( name , None )
return { '2' : True , True : True , 'True' : True , '3' : False , 'False' : False , False : False } . get ( value , None )
class SelectMultiple ( Select ) :
allow_multiple_selected = True
def render ( self , name , value , attrs = None , choices = ( ) ) :
if value is None :
value = [ ]
final_attrs = self . build_attrs ( attrs , name = name )
output = [ format_html ( '' )
return mark_safe ( '\n' . join ( output ) )
def value_from_datadict ( self , data , files , name ) :
if isinstance ( data , ( MultiValueDict , MergeDict ) ) :
return data . getlist ( name )
return data . get ( name , None )
@ python_2_unicode_compatible
class ChoiceInput ( SubWidget ) :
input_type = None
def __init__ ( self , name , value , attrs , choice , index ) :
self . name = name
self . value = value
self . attrs = attrs
self . choice_value = force_text ( choice [ 0 ] )
self . choice_label = force_text ( choice [ 1 ] )
self . index = index
if 'id' in self . attrs :
self . attrs [ 'id' ] += "_%d" % self . index
def __str__ ( self ) :
return self . render ( )
def render ( self , name = None , value = None , attrs = None , choices = ( ) ) :
if self . id_for_label :
label_for = format_html ( ' for="{0}"' , self . id_for_label )
else :
label_for = ''
return format_html ( '' , label_for , self . tag ( ) , self . choice_label )
def is_checked ( self ) :
return self . value == self . choice_value
def tag ( self ) :
final_attrs = dict ( self . attrs , type = self . input_type , name = self . name , value = self . choice_value )
if self . is_checked ( ) :
final_attrs [ 'checked' ] = 'checked'
return format_html ( '' , flatatt ( final_attrs ) )
@ property
def id_for_label ( self ) :
return self . attrs . get ( 'id' , '' )
class RadioChoiceInput ( ChoiceInput ) :
input_type = 'radio'
def __init__ ( self , * args , ** kwargs ) :
super ( RadioChoiceInput , self ) . __init__ ( * args , ** kwargs )
self . value = force_text ( self . value )
class CheckboxChoiceInput ( ChoiceInput ) :
input_type = 'checkbox'
def __init__ ( self , * args , ** kwargs ) :
super ( CheckboxChoiceInput , self ) . __init__ ( * args , ** kwargs )
self . value = set ( force_text ( v ) for v in self . value )
def is_checked ( self ) :
return self . choice_value in self . value
@ python_2_unicode_compatible
class ChoiceFieldRenderer ( object ) :
choice_input_class = None
outer_html = ''
inner_html = '{choice_value}{sub_widgets}'
def __init__ ( self , name , value , attrs , choices ) :
self . name = name
self . value = value
self . attrs = attrs
self . choices = choices
def __getitem__ ( self , idx ) :
choice = self . choices [ idx ]
return self . choice_input_class ( self . name , self . value , self . attrs . copy ( ) , choice , idx )
def __str__ ( self ) :
return self . render ( )
def render ( self ) :
id_ = self . attrs . get ( 'id' , None )
output = [ ]
for i , choice in enumerate ( self . choices ) :
choice_value , choice_label = choice
if isinstance ( choice_label , ( tuple , list ) ) :
attrs_plus = self . attrs . copy ( )
if id_ :
attrs_plus [ 'id' ] += '_{0}' . format ( i )
sub_ul_renderer = ChoiceFieldRenderer ( name = self . name , value = self . value , attrs = attrs_plus , choices = choice_label )
sub_ul_renderer . choice_input_class = self . choice_input_class
output . append ( format_html ( self . inner_html , choice_value = choice_value , sub_widgets = sub_ul_renderer . render ( ) ) )
else :
w = self . choice_input_class ( self . name , self . value , self . attrs . copy ( ) , choice , i )
output . append ( format_html ( self . inner_html , choice_value = force_text ( w ) , sub_widgets = '' ) )
return format_html ( self . outer_html , id_attr = format_html ( ' id="{0}"' , id_ ) if id_ else '' , content = mark_safe ( '\n' . join ( output ) ) )
class RadioFieldRenderer ( ChoiceFieldRenderer ) :
choice_input_class = RadioChoiceInput
class CheckboxFieldRenderer ( ChoiceFieldRenderer ) :
choice_input_class = CheckboxChoiceInput
class RendererMixin ( object ) :
renderer = None
_empty_value = None
def __init__ ( self , * args , ** kwargs ) :
renderer = kwargs . pop ( 'renderer' , None )
if renderer :
self . renderer = renderer
super ( RendererMixin , self ) . __init__ ( * args , ** kwargs )
def subwidgets ( self , name , value , attrs = None , choices = ( ) ) :
for widget in self . get_renderer ( name , value , attrs , choices ) :
yield widget
def get_renderer ( self , name , value , attrs = None , choices = ( ) ) :
if value is None :
value = self . _empty_value
final_attrs = self . build_attrs ( attrs )
choices = list ( chain ( self . choices , choices ) )
return self . renderer ( name , value , final_attrs , choices )
def render ( self , name , value , attrs = None , choices = ( ) ) :
return self . get_renderer ( name , value , attrs , choices ) . render ( )
def id_for_label ( self , id_ ) :
if id_ :
id_ += '_0'
return id_
class RadioSelect ( RendererMixin , Select ) :
renderer = RadioFieldRenderer
_empty_value = ''
class CheckboxSelectMultiple ( RendererMixin , SelectMultiple ) :
renderer = CheckboxFieldRenderer
_empty_value = [ ]
class MultiWidget ( Widget ) :
def __init__ ( self , widgets , attrs = None ) :
self . widgets = [ w ( ) if isinstance ( w , type ) else w for w in widgets ]
super ( MultiWidget , self ) . __init__ ( attrs )
@ property
def is_hidden ( self ) :
return all ( w . is_hidden for w in self . widgets )
def render ( self , name , value , attrs = None ) :
if self . is_localized :
for widget in self . widgets :
widget . is_localized = self . is_localized
if not isinstance ( value , list ) :
value = self . decompress ( value )
output = [ ]
final_attrs = self . build_attrs ( attrs )
id_ = final_attrs . get ( 'id' , None )
for i , widget in enumerate ( self . widgets ) :
try :
widget_value = value [ i ]
except IndexError :
widget_value = None
if id_ :
final_attrs = dict ( final_attrs , id = '%s_%s' % ( id_ , i ) )
output . append ( widget . render ( name + '_%s' % i , widget_value , final_attrs ) )
return mark_safe ( self . format_output ( output ) )
def id_for_label ( self , id_ ) :
if id_ :
id_ += '_0'
return id_
def value_from_datadict ( self , data , files , name ) :
return [ widget . value_from_datadict ( data , files , name + '_%s' % i ) for i , widget in enumerate ( self . widgets ) ]
def format_output ( self , rendered_widgets ) :
return '' . join ( rendered_widgets )
def decompress ( self , value ) :
raise NotImplementedError ( 'Subclasses must implement this method.' )
def _get_media ( self ) :
media = Media ( )
for w in self . widgets :
media = media + w . media
return media
media = property ( _get_media )
def __deepcopy__ ( self , memo ) :
obj = super ( MultiWidget , self ) . __deepcopy__ ( memo )
obj . widgets = copy . deepcopy ( self . widgets )
return obj
@ property
def needs_multipart_form ( self ) :
return any ( w . needs_multipart_form for w in self . widgets )
class SplitDateTimeWidget ( MultiWidget ) :
supports_microseconds = False
def __init__ ( self , attrs = None , date_format = None , time_format = None ) :
widgets = ( DateInput ( attrs = attrs , format = date_format ) , TimeInput ( attrs = attrs , format = time_format ) )
super ( SplitDateTimeWidget , self ) . __init__ ( widgets , attrs )
def decompress ( self , value ) :
if value :
value = to_current_timezone ( value )
return [ value . date ( ) , value . time ( ) . replace ( microsecond = 0 ) ]
return [ None , None ]
class SplitHiddenDateTimeWidget ( SplitDateTimeWidget ) :
def __init__ ( self , attrs = None , date_format = None , time_format = None ) :
super ( SplitHiddenDateTimeWidget , self ) . __init__ ( attrs , date_format , time_format )
for widget in self . widgets :
widget . input_type = 'hidden'
from django . http . cookie import SimpleCookie , parse_cookie
from django . http . request import ( HttpRequest , QueryDict , RawPostDataException , UnreadablePostError , build_request_repr )
from django . http . response import ( HttpResponse , StreamingHttpResponse , HttpResponseRedirect , HttpResponsePermanentRedirect , HttpResponseNotModified , HttpResponseBadRequest , HttpResponseForbidden , HttpResponseNotFound , HttpResponseNotAllowed , HttpResponseGone , HttpResponseServerError , Http404 , BadHeaderError , JsonResponse )
from django . http . utils import fix_location_header , conditional_content_removal
__all__ = [ 'SimpleCookie' , 'parse_cookie' , 'HttpRequest' , 'QueryDict' , 'RawPostDataException' , 'UnreadablePostError' , 'build_request_repr' , 'HttpResponse' , 'StreamingHttpResponse' , 'HttpResponseRedirect' , 'HttpResponsePermanentRedirect' , 'HttpResponseNotModified' , 'HttpResponseBadRequest' , 'HttpResponseForbidden' , 'HttpResponseNotFound' , 'HttpResponseNotAllowed' , 'HttpResponseGone' , 'HttpResponseServerError' , 'Http404' , 'BadHeaderError' , 'fix_location_header' , 'JsonResponse' , 'conditional_content_removal' , ]
from __future__ import unicode_literals
from django . utils . encoding import force_str
from django . utils import six
from django . utils . six . moves import http_cookies
_cookie_encodes_correctly = http_cookies . SimpleCookie ( ) . value_encode ( ';' ) == ( ';' , '"\\073"' )
_tc = http_cookies . SimpleCookie ( )
try :
_tc . load ( str ( 'foo:bar=1' ) )
_cookie_allows_colon_in_names = True
except http_cookies . CookieError :
_cookie_allows_colon_in_names = False
if _cookie_encodes_correctly and _cookie_allows_colon_in_names :
SimpleCookie = http_cookies . SimpleCookie
else :
Morsel = http_cookies . Morsel
class SimpleCookie ( http_cookies . SimpleCookie ) :
if not _cookie_encodes_correctly :
def value_encode ( self , val ) :
val , encoded = super ( SimpleCookie , self ) . value_encode ( val )
encoded = encoded . replace ( ";" , "\\073" ) . replace ( "," , "\\054" )
if "\\" in encoded and not encoded . startswith ( '"' ) :
encoded = '"' + encoded + '"'
return val , encoded
if not _cookie_allows_colon_in_names :
def load ( self , rawdata ) :
self . bad_cookies = set ( )
if six . PY2 and isinstance ( rawdata , six . text_type ) :
rawdata = force_str ( rawdata )
super ( SimpleCookie , self ) . load ( rawdata )
for key in self . bad_cookies :
del self [ key ]
def _BaseCookie__set ( self , key , real_value , coded_value ) :
key = force_str ( key )
try :
M = self . get ( key , Morsel ( ) )
M . set ( key , real_value , coded_value )
dict . __setitem__ ( self , key , M )
except http_cookies . CookieError :
if not hasattr ( self , 'bad_cookies' ) :
self . bad_cookies = set ( )
self . bad_cookies . add ( key )
dict . __setitem__ ( self , key , http_cookies . Morsel ( ) )
def parse_cookie ( cookie ) :
if cookie == '' :
return { }
if not isinstance ( cookie , http_cookies . BaseCookie ) :
try :
c = SimpleCookie ( )
c . load ( cookie )
except http_cookies . CookieError :
return { }
else :
c = cookie
cookiedict = { }
for key in c . keys ( ) :
cookiedict [ key ] = c . get ( key ) . value
return cookiedict
from __future__ import unicode_literals
import base64
import binascii
import cgi
import sys
from django . conf import settings
from django . core . exceptions import SuspiciousMultipartForm
from django . utils . datastructures import MultiValueDict
from django . utils . encoding import force_text
from django . utils import six
from django . utils . text import unescape_entities
from django . core . files . uploadhandler import StopUpload , SkipFile , StopFutureHandlers
__all__ = ( 'MultiPartParser' , 'MultiPartParserError' , 'InputStreamExhausted' )
class MultiPartParserError ( Exception ) :
pass
class InputStreamExhausted ( Exception ) :
pass
RAW = "raw"
FILE = "file"
FIELD = "field"
_BASE64_DECODE_ERROR = TypeError if six . PY2 else binascii . Error
class MultiPartParser ( object ) :
def __init__ ( self , META , input_data , upload_handlers , encoding = None ) :
content_type = META . get ( 'HTTP_CONTENT_TYPE' , META . get ( 'CONTENT_TYPE' , '' ) )
if not content_type . startswith ( 'multipart/' ) :
raise MultiPartParserError ( 'Invalid Content-Type: %s' % content_type )
ctypes , opts = parse_header ( content_type . encode ( 'ascii' ) )
boundary = opts . get ( 'boundary' )
if not boundary or not cgi . valid_boundary ( boundary ) :
raise MultiPartParserError ( 'Invalid boundary in multipart: %s' % boundary )
try :
content_length = int ( META . get ( 'HTTP_CONTENT_LENGTH' , META . get ( 'CONTENT_LENGTH' , 0 ) ) )
except ( ValueError , TypeError ) :
content_length = 0
if content_length < 0 :
raise MultiPartParserError ( "Invalid content length: %r" % content_length )
if isinstance ( boundary , six . text_type ) :
boundary = boundary . encode ( 'ascii' )
self . _boundary = boundary
self . _input_data = input_data
possible_sizes = [ x . chunk_size for x in upload_handlers if x . chunk_size ]
self . _chunk_size = min ( [ 2 ** 31 - 4 ] + possible_sizes )
self . _meta = META
self . _encoding = encoding or settings . DEFAULT_CHARSET
self . _content_length = content_length
self . _upload_handlers = upload_handlers
def parse ( self ) :
from django . http import QueryDict
encoding = self . _encoding
handlers = self . _upload_handlers
if self . _content_length == 0 :
return QueryDict ( '' , encoding = self . _encoding ) , MultiValueDict ( )
for handler in handlers :
result = handler . handle_raw_input ( self . _input_data , self . _meta , self . _content_length , self . _boundary , encoding )
if result is not None :
return result [ 0 ] , result [ 1 ]
self . _post = QueryDict ( '' , mutable = True )
self . _files = MultiValueDict ( )
stream = LazyStream ( ChunkIter ( self . _input_data , self . _chunk_size ) )
old_field_name = None
counters = [ 0 ] * len ( handlers )
try :
for item_type , meta_data , field_stream in Parser ( stream , self . _boundary ) :
if old_field_name :
self . handle_file_complete ( old_field_name , counters )
old_field_name = None
try :
disposition = meta_data [ 'content-disposition' ] [ 1 ]
field_name = disposition [ 'name' ] . strip ( )
except ( KeyError , IndexError , AttributeError ) :
continue
transfer_encoding = meta_data . get ( 'content-transfer-encoding' )
if transfer_encoding is not None :
transfer_encoding = transfer_encoding [ 0 ] . strip ( )
field_name = force_text ( field_name , encoding , errors = 'replace' )
if item_type == FIELD :
if transfer_encoding == 'base64' :
raw_data = field_stream . read ( )
try :
data = base64 . b64decode ( raw_data )
except _BASE64_DECODE_ERROR :
data = raw_data
else :
data = field_stream . read ( )
self . _post . appendlist ( field_name , force_text ( data , encoding , errors = 'replace' ) )
elif item_type == FILE :
file_name = disposition . get ( 'filename' )
if not file_name :
continue
file_name = force_text ( file_name , encoding , errors = 'replace' )
file_name = self . IE_sanitize ( unescape_entities ( file_name ) )
content_type , content_type_extra = meta_data . get ( 'content-type' , ( '' , { } ) )
content_type = content_type . strip ( )
charset = content_type_extra . get ( 'charset' )
try :
content_length = int ( meta_data . get ( 'content-length' ) [ 0 ] )
except ( IndexError , TypeError , ValueError ) :
content_length = None
counters = [ 0 ] * len ( handlers )
try :
for handler in handlers :
try :
handler . new_file ( field_name , file_name , content_type , content_length , charset , content_type_extra )
except StopFutureHandlers :
break
for chunk in field_stream :
if transfer_encoding == 'base64' :
over_bytes = len ( chunk ) % 4
if over_bytes :
over_chunk = field_stream . read ( 4 - over_bytes )
chunk += over_chunk
try :
chunk = base64 . b64decode ( chunk )
except Exception as e :
msg = "Could not decode base64 data: %r" % e
six . reraise ( MultiPartParserError , MultiPartParserError ( msg ) , sys . exc_info ( ) [ 2 ] )
for i , handler in enumerate ( handlers ) :
chunk_length = len ( chunk )
chunk = handler . receive_data_chunk ( chunk , counters [ i ] )
counters [ i ] += chunk_length
if chunk is None :
break
except SkipFile :
self . _close_files ( )
exhaust ( field_stream )
else :
old_field_name = field_name
else :
exhaust ( stream )
except StopUpload as e :
self . _close_files ( )
if not e . connection_reset :
exhaust ( self . _input_data )
else :
exhaust ( self . _input_data )
for handler in handlers :
retval = handler . upload_complete ( )
if retval :
break
return self . _post , self . _files
def handle_file_complete ( self , old_field_name , counters ) :
for i , handler in enumerate ( self . _upload_handlers ) :
file_obj = handler . file_complete ( counters [ i ] )
if file_obj :
self . _files . appendlist ( force_text ( old_field_name , self . _encoding , errors = 'replace' ) , file_obj )
break
def IE_sanitize ( self , filename ) :
return filename and filename [ filename . rfind ( "\\" ) + 1 : ] . strip ( )
def _close_files ( self ) :
for handler in self . _upload_handlers :
if hasattr ( handler , 'file' ) :
handler . file . close ( )
class LazyStream ( six . Iterator ) :
def __init__ ( self , producer , length = None ) :
self . _producer = producer
self . _empty = False
self . _leftover = b''
self . length = length
self . position = 0
self . _remaining = length
self . _unget_history = [ ]
def tell ( self ) :
return self . position
def read ( self , size = None ) :
def parts ( ) :
remaining = self . _remaining if size is None else size
if remaining is None :
yield b'' . join ( self )
return
while remaining != 0 :
assert remaining > 0 , 'remaining bytes to read should never go negative'
chunk = next ( self )
emitting = chunk [ : remaining ]
self . unget ( chunk [ remaining : ] )
remaining -= len ( emitting )
yield emitting
out = b'' . join ( parts ( ) )
return out
def __next__ ( self ) :
if self . _leftover :
output = self . _leftover
self . _leftover = b''
else :
output = next ( self . _producer )
self . _unget_history = [ ]
self . position += len ( output )
return output
def close ( self ) :
self . _producer = [ ]
def __iter__ ( self ) :
return self
def unget ( self , bytes ) :
if not bytes :
return
self . _update_unget_history ( len ( bytes ) )
self . position -= len ( bytes )
self . _leftover = b'' . join ( [ bytes , self . _leftover ] )
def _update_unget_history ( self , num_bytes ) :
self . _unget_history = [ num_bytes ] + self . _unget_history [ : 49 ]
number_equal = len ( [ current_number for current_number in self . _unget_history if current_number == num_bytes ] )
if number_equal > 40 :
raise SuspiciousMultipartForm ( "The multipart parser got stuck, which shouldn't happen with" " normal uploaded files. Check for malicious upload activity;" " if there is none, report this to the Django developers." )
class ChunkIter ( six . Iterator ) :
def __init__ ( self , flo , chunk_size = 64 * 1024 ) :
self . flo = flo
self . chunk_size = chunk_size
def __next__ ( self ) :
try :
data = self . flo . read ( self . chunk_size )
except InputStreamExhausted :
raise StopIteration ( )
if data :
return data
else :
raise StopIteration ( )
def __iter__ ( self ) :
return self
class InterBoundaryIter ( six . Iterator ) :
def __init__ ( self , stream , boundary ) :
self . _stream = stream
self . _boundary = boundary
def __iter__ ( self ) :
return self
def __next__ ( self ) :
try :
return LazyStream ( BoundaryIter ( self . _stream , self . _boundary ) )
except InputStreamExhausted :
raise StopIteration ( )
class BoundaryIter ( six . Iterator ) :
def __init__ ( self , stream , boundary ) :
self . _stream = stream
self . _boundary = boundary
self . _done = False
self . _rollback = len ( boundary ) + 6
unused_char = self . _stream . read ( 1 )
if not unused_char :
raise InputStreamExhausted ( )
self . _stream . unget ( unused_char )
def __iter__ ( self ) :
return self
def __next__ ( self ) :
if self . _done :
raise StopIteration ( )
stream = self . _stream
rollback = self . _rollback
bytes_read = 0
chunks = [ ]
for bytes in stream :
bytes_read += len ( bytes )
chunks . append ( bytes )
if bytes_read > rollback :
break
if not bytes :
break
else :
self . _done = True
if not chunks :
raise StopIteration ( )
chunk = b'' . join ( chunks )
boundary = self . _find_boundary ( chunk , len ( chunk ) < self . _rollback )
if boundary :
end , next = boundary
stream . unget ( chunk [ next : ] )
self . _done = True
return chunk [ : end ]
else :
if not chunk [ : - rollback ] :
self . _done = True
return chunk
else :
stream . unget ( chunk [ - rollback : ] )
return chunk [ : - rollback ]
def _find_boundary ( self , data , eof = False ) :
index = data . find ( self . _boundary )
if index < 0 :
return None
else :
end = index
next = index + len ( self . _boundary )
last = max ( 0 , end - 1 )
if data [ last : last + 1 ] == b'\n' :
end -= 1
last = max ( 0 , end - 1 )
if data [ last : last + 1 ] == b'\r' :
end -= 1
return end , next
def exhaust ( stream_or_iterable ) :
iterator = None
try :
iterator = iter ( stream_or_iterable )
except TypeError :
iterator = ChunkIter ( stream_or_iterable , 16384 )
if iterator is None :
raise MultiPartParserError ( 'multipartparser.exhaust() was passed a non-iterable or stream parameter' )
for __ in iterator :
pass
def parse_boundary_stream ( stream , max_header_size ) :
chunk = stream . read ( max_header_size )
header_end = chunk . find ( b'\r\n\r\n' )
def _parse_header ( line ) :
main_value_pair , params = parse_header ( line )
try :
name , value = main_value_pair . split ( ':' , 1 )
except ValueError :
raise ValueError ( "Invalid header: %r" % line )
return name , ( value , params )
if header_end == - 1 :
stream . unget ( chunk )
return ( RAW , { } , stream )
header = chunk [ : header_end ]
stream . unget ( chunk [ header_end + 4 : ] )
TYPE = RAW
outdict = { }
for line in header . split ( b'\r\n' ) :
try :
name , ( value , params ) = _parse_header ( line )
except ValueError :
continue
if name == 'content-disposition' :
TYPE = FIELD
if params . get ( 'filename' ) :
TYPE = FILE
outdict [ name ] = value , params
if TYPE == RAW :
stream . unget ( chunk )
return ( TYPE , outdict , stream )
class Parser ( object ) :
def __init__ ( self , stream , boundary ) :
self . _stream = stream
self . _separator = b'--' + boundary
def __iter__ ( self ) :
boundarystream = InterBoundaryIter ( self . _stream , self . _separator )
for sub_stream in boundarystream :
yield parse_boundary_stream ( sub_stream , 1024 )
def parse_header ( line ) :
plist = _parse_header_params ( b';' + line )
key = plist . pop ( 0 ) . lower ( ) . decode ( 'ascii' )
pdict = { }
for p in plist :
i = p . find ( b'=' )
if i >= 0 :
name = p [ : i ] . strip ( ) . lower ( ) . decode ( 'ascii' )
value = p [ i + 1 : ] . strip ( )
if len ( value ) >= 2 and value [ : 1 ] == value [ - 1 : ] == b'"' :
value = value [ 1 : - 1 ]
value = value . replace ( b'\\\\' , b'\\' ) . replace ( b'\\"' , b'"' )
pdict [ name ] = value
return key , pdict
def _parse_header_params ( s ) :
plist = [ ]
while s [ : 1 ] == b';' :
s = s [ 1 : ]
end = s . find ( b';' )
while end > 0 and s . count ( b'"' , 0 , end ) % 2 :
end = s . find ( b';' , end + 1 )
if end < 0 :
end = len ( s )
f = s [ : end ]
plist . append ( f . strip ( ) )
s = s [ end : ]
return plist
from __future__ import unicode_literals
import copy
import os
import re
import sys
from io import BytesIO
from itertools import chain
from pprint import pformat
from django . conf import settings
from django . core import signing
from django . core . exceptions import DisallowedHost , ImproperlyConfigured
from django . core . files import uploadhandler
from django . http . multipartparser import MultiPartParser , MultiPartParserError
from django . utils import six
from django . utils . datastructures import MultiValueDict , ImmutableList
from django . utils . encoding import force_bytes , force_text , force_str , iri_to_uri
from django . utils . six . moves . urllib . parse import parse_qsl , urlencode , quote , urljoin , urlsplit
RAISE_ERROR = object ( )
absolute_http_url_re = re . compile ( r"^https?://" , re . I )
host_validation_re = re . compile ( r"^([a-z0-9.-]+|\[[a-f0-9]*:[a-f0-9:]+\])(:\d+)?$" )
class UnreadablePostError ( IOError ) :
pass
class RawPostDataException ( Exception ) :
pass
class HttpRequest ( object ) :
_encoding = None
_upload_handlers = [ ]
def __init__ ( self ) :
self . GET = QueryDict ( mutable = True )
self . POST = QueryDict ( mutable = True )
self . COOKIES = { }
self . META = { }
self . FILES = MultiValueDict ( )
self . path = ''
self . path_info = ''
self . method = None
self . resolver_match = None
self . _post_parse_error = False
def __repr__ ( self ) :
return build_request_repr ( self )
def get_host ( self ) :
if settings . USE_X_FORWARDED_HOST and ( 'HTTP_X_FORWARDED_HOST' in self . META ) :
host = self . META [ 'HTTP_X_FORWARDED_HOST' ]
elif 'HTTP_HOST' in self . META :
host = self . META [ 'HTTP_HOST' ]
else :
host = self . META [ 'SERVER_NAME' ]
server_port = str ( self . META [ 'SERVER_PORT' ] )
if server_port != ( '443' if self . is_secure ( ) else '80' ) :
host = '%s:%s' % ( host , server_port )
if settings . DEBUG :
return host
domain , port = split_domain_port ( host )
if domain and validate_host ( domain , settings . ALLOWED_HOSTS ) :
return host
else :
msg = "Invalid HTTP_HOST header: %r." % host
if domain :
msg += " You may need to add %r to ALLOWED_HOSTS." % domain
else :
msg += " The domain name provided is not valid according to RFC 1034/1035."
raise DisallowedHost ( msg )
def get_full_path ( self ) :
return '%s%s' % ( self . path , ( '?' + iri_to_uri ( self . META . get ( 'QUERY_STRING' , '' ) ) ) if self . META . get ( 'QUERY_STRING' , '' ) else '' )
def get_signed_cookie ( self , key , default = RAISE_ERROR , salt = '' , max_age = None ) :
try :
cookie_value = self . COOKIES [ key ]
except KeyError :
if default is not RAISE_ERROR :
return default
else :
raise
try :
value = signing . get_cookie_signer ( salt = key + salt ) . unsign ( cookie_value , max_age = max_age )
except signing . BadSignature :
if default is not RAISE_ERROR :
return default
else :
raise
return value
def build_absolute_uri ( self , location = None ) :
if location is None :
location = '//%s' % self . get_full_path ( )
bits = urlsplit ( location )
if not ( bits . scheme and bits . netloc ) :
current_uri = '{scheme}://{host}{path}' . format ( scheme = self . scheme , host = self . get_host ( ) , path = self . path )
location = urljoin ( current_uri , location )
return iri_to_uri ( location )
def _get_scheme ( self ) :
return 'https' if os . environ . get ( "HTTPS" ) == "on" else 'http'
@ property
def scheme ( self ) :
if settings . SECURE_PROXY_SSL_HEADER :
try :
header , value = settings . SECURE_PROXY_SSL_HEADER
except ValueError :
raise ImproperlyConfigured ( 'The SECURE_PROXY_SSL_HEADER setting must be a tuple containing two values.' )
if self . META . get ( header , None ) == value :
return 'https'
return self . _get_scheme ( )
def is_secure ( self ) :
return self . scheme == 'https'
def is_ajax ( self ) :
return self . META . get ( 'HTTP_X_REQUESTED_WITH' ) == 'XMLHttpRequest'
@ property
def encoding ( self ) :
return self . _encoding
@ encoding . setter
def encoding ( self , val ) :
self . _encoding = val
if hasattr ( self , '_get' ) :
del self . _get
if hasattr ( self , '_post' ) :
del self . _post
def _initialize_handlers ( self ) :
self . _upload_handlers = [ uploadhandler . load_handler ( handler , self ) for handler in settings . FILE_UPLOAD_HANDLERS ]
@ property
def upload_handlers ( self ) :
if not self . _upload_handlers :
self . _initialize_handlers ( )
return self . _upload_handlers
@ upload_handlers . setter
def upload_handlers ( self , upload_handlers ) :
if hasattr ( self , '_files' ) :
raise AttributeError ( "You cannot set the upload handlers after the upload has been processed." )
self . _upload_handlers = upload_handlers
def parse_file_upload ( self , META , post_data ) :
self . upload_handlers = ImmutableList ( self . upload_handlers , warning = "You cannot alter upload handlers after the upload has been processed." )
parser = MultiPartParser ( META , post_data , self . upload_handlers , self . encoding )
return parser . parse ( )
@ property
def body ( self ) :
if not hasattr ( self , '_body' ) :
if self . _read_started :
raise RawPostDataException ( "You cannot access body after reading from request's data stream" )
try :
self . _body = self . read ( )
except IOError as e :
six . reraise ( UnreadablePostError , UnreadablePostError ( * e . args ) , sys . exc_info ( ) [ 2 ] )
self . _stream = BytesIO ( self . _body )
return self . _body
def _mark_post_parse_error ( self ) :
self . _post = QueryDict ( '' )
self . _files = MultiValueDict ( )
self . _post_parse_error = True
def _load_post_and_files ( self ) :
if self . method != 'POST' :
self . _post , self . _files = QueryDict ( '' , encoding = self . _encoding ) , MultiValueDict ( )
return
if self . _read_started and not hasattr ( self , '_body' ) :
self . _mark_post_parse_error ( )
return
if self . META . get ( 'CONTENT_TYPE' , '' ) . startswith ( 'multipart/form-data' ) :
if hasattr ( self , '_body' ) :
data = BytesIO ( self . _body )
else :
data = self
try :
self . _post , self . _files = self . parse_file_upload ( self . META , data )
except MultiPartParserError :
self . _mark_post_parse_error ( )
raise
elif self . META . get ( 'CONTENT_TYPE' , '' ) . startswith ( 'application/x-www-form-urlencoded' ) :
self . _post , self . _files = QueryDict ( self . body , encoding = self . _encoding ) , MultiValueDict ( )
else :
self . _post , self . _files = QueryDict ( '' , encoding = self . _encoding ) , MultiValueDict ( )
def close ( self ) :
if hasattr ( self , '_files' ) :
for f in chain . from_iterable ( l [ 1 ] for l in self . _files . lists ( ) ) :
f . close ( )
def read ( self , * args , ** kwargs ) :
self . _read_started = True
try :
return self . _stream . read ( * args , ** kwargs )
except IOError as e :
six . reraise ( UnreadablePostError , UnreadablePostError ( * e . args ) , sys . exc_info ( ) [ 2 ] )
def readline ( self , * args , ** kwargs ) :
self . _read_started = True
try :
return self . _stream . readline ( * args , ** kwargs )
except IOError as e :
six . reraise ( UnreadablePostError , UnreadablePostError ( * e . args ) , sys . exc_info ( ) [ 2 ] )
def xreadlines ( self ) :
while True :
buf = self . readline ( )
if not buf :
break
yield buf
__iter__ = xreadlines
def readlines ( self ) :
return list ( iter ( self ) )
class QueryDict ( MultiValueDict ) :
_mutable = True
_encoding = None
def __init__ ( self , query_string = None , mutable = False , encoding = None ) :
super ( QueryDict , self ) . __init__ ( )
if not encoding :
encoding = settings . DEFAULT_CHARSET
self . encoding = encoding
if six . PY3 :
if isinstance ( query_string , bytes ) :
query_string = query_string . decode ( )
for key , value in parse_qsl ( query_string or '' , keep_blank_values = True , encoding = encoding ) :
self . appendlist ( key , value )
else :
for key , value in parse_qsl ( query_string or '' , keep_blank_values = True ) :
self . appendlist ( force_text ( key , encoding , errors = 'replace' ) , force_text ( value , encoding , errors = 'replace' ) )
self . _mutable = mutable
@ property
def encoding ( self ) :
if self . _encoding is None :
self . _encoding = settings . DEFAULT_CHARSET
return self . _encoding
@ encoding . setter
def encoding ( self , value ) :
self . _encoding = value
def _assert_mutable ( self ) :
if not self . _mutable :
raise AttributeError ( "This QueryDict instance is immutable" )
def __setitem__ ( self , key , value ) :
self . _assert_mutable ( )
key = bytes_to_text ( key , self . encoding )
value = bytes_to_text ( value , self . encoding )
super ( QueryDict , self ) . __setitem__ ( key , value )
def __delitem__ ( self , key ) :
self . _assert_mutable ( )
super ( QueryDict , self ) . __delitem__ ( key )
def __copy__ ( self ) :
result = self . __class__ ( '' , mutable = True , encoding = self . encoding )
for key , value in six . iterlists ( self ) :
result . setlist ( key , value )
return result
def __deepcopy__ ( self , memo ) :
result = self . __class__ ( '' , mutable = True , encoding = self . encoding )
memo [ id ( self ) ] = result
for key , value in six . iterlists ( self ) :
result . setlist ( copy . deepcopy ( key , memo ) , copy . deepcopy ( value , memo ) )
return result
def setlist ( self , key , list_ ) :
self . _assert_mutable ( )
key = bytes_to_text ( key , self . encoding )
list_ = [ bytes_to_text ( elt , self . encoding ) for elt in list_ ]
super ( QueryDict , self ) . setlist ( key , list_ )
def setlistdefault ( self , key , default_list = None ) :
self . _assert_mutable ( )
return super ( QueryDict , self ) . setlistdefault ( key , default_list )
def appendlist ( self , key , value ) :
self . _assert_mutable ( )
key = bytes_to_text ( key , self . encoding )
value = bytes_to_text ( value , self . encoding )
super ( QueryDict , self ) . appendlist ( key , value )
def pop ( self , key , * args ) :
self . _assert_mutable ( )
return super ( QueryDict , self ) . pop ( key , * args )
def popitem ( self ) :
self . _assert_mutable ( )
return super ( QueryDict , self ) . popitem ( )
def clear ( self ) :
self . _assert_mutable ( )
super ( QueryDict , self ) . clear ( )
def setdefault ( self , key , default = None ) :
self . _assert_mutable ( )
key = bytes_to_text ( key , self . encoding )
default = bytes_to_text ( default , self . encoding )
return super ( QueryDict , self ) . setdefault ( key , default )
def copy ( self ) :
return self . __deepcopy__ ( { } )
def urlencode ( self , safe = None ) :
output = [ ]
if safe :
safe = force_bytes ( safe , self . encoding )
encode = lambda k , v : '%s=%s' % ( ( quote ( k , safe ) , quote ( v , safe ) ) )
else :
encode = lambda k , v : urlencode ( { k : v } )
for k , list_ in self . lists ( ) :
k = force_bytes ( k , self . encoding )
output . extend ( [ encode ( k , force_bytes ( v , self . encoding ) ) for v in list_ ] )
return '&' . join ( output )
def build_request_repr ( request , path_override = None , GET_override = None , POST_override = None , COOKIES_override = None , META_override = None ) :
try :
get = ( pformat ( GET_override ) if GET_override is not None else pformat ( request . GET ) )
except Exception :
get = ''
if request . _post_parse_error :
post = ''
else :
try :
post = ( pformat ( POST_override ) if POST_override is not None else pformat ( request . POST ) )
except Exception :
post = ''
try :
cookies = ( pformat ( COOKIES_override ) if COOKIES_override is not None else pformat ( request . COOKIES ) )
except Exception :
cookies = ''
try :
meta = ( pformat ( META_override ) if META_override is not None else pformat ( request . META ) )
except Exception :
meta = ''
path = path_override if path_override is not None else request . path
return force_str ( '<%s\npath:%s,\nGET:%s,\nPOST:%s,\nCOOKIES:%s,\nMETA:%s>' % ( request . __class__ . __name__ , path , six . text_type ( get ) , six . text_type ( post ) , six . text_type ( cookies ) , six . text_type ( meta ) ) )
def bytes_to_text ( s , encoding ) :
if isinstance ( s , bytes ) :
return six . text_type ( s , encoding , 'replace' )
else :
return s
def split_domain_port ( host ) :
host = host . lower ( )
if not host_validation_re . match ( host ) :
return '' , ''
if host [ - 1 ] == ']' :
return host , ''
bits = host . rsplit ( ':' , 1 )
if len ( bits ) == 2 :
return tuple ( bits )
return bits [ 0 ] , ''
def validate_host ( host , allowed_hosts ) :
host = host [ : - 1 ] if host . endswith ( '.' ) else host
for pattern in allowed_hosts :
pattern = pattern . lower ( )
match = ( pattern == '*' or pattern . startswith ( '.' ) and ( host . endswith ( pattern ) or host == pattern [ 1 : ] ) or pattern == host )
if match :
return True
return False
from __future__ import unicode_literals
import datetime
import json
import sys
import time
from email . header import Header
from django . conf import settings
from django . core import signals
from django . core import signing
from django . core . exceptions import DisallowedRedirect
from django . core . serializers . json import DjangoJSONEncoder
from django . http . cookie import SimpleCookie
from django . utils import six , timezone
from django . utils . encoding import force_bytes , force_text , iri_to_uri
from django . utils . http import cookie_date
from django . utils . six . moves import map
from django . utils . six . moves . urllib . parse import urlparse
REASON_PHRASES = { 100 : 'CONTINUE' , 101 : 'SWITCHING PROTOCOLS' , 102 : 'PROCESSING' , 200 : 'OK' , 201 : 'CREATED' , 202 : 'ACCEPTED' , 203 : 'NON-AUTHORITATIVE INFORMATION' , 204 : 'NO CONTENT' , 205 : 'RESET CONTENT' , 206 : 'PARTIAL CONTENT' , 207 : 'MULTI-STATUS' , 208 : 'ALREADY REPORTED' , 226 : 'IM USED' , 300 : 'MULTIPLE CHOICES' , 301 : 'MOVED PERMANENTLY' , 302 : 'FOUND' , 303 : 'SEE OTHER' , 304 : 'NOT MODIFIED' , 305 : 'USE PROXY' , 306 : 'RESERVED' , 307 : 'TEMPORARY REDIRECT' , 308 : 'PERMANENT REDIRECT' , 400 : 'BAD REQUEST' , 401 : 'UNAUTHORIZED' , 402 : 'PAYMENT REQUIRED' , 403 : 'FORBIDDEN' , 404 : 'NOT FOUND' , 405 : 'METHOD NOT ALLOWED' , 406 : 'NOT ACCEPTABLE' , 407 : 'PROXY AUTHENTICATION REQUIRED' , 408 : 'REQUEST TIMEOUT' , 409 : 'CONFLICT' , 410 : 'GONE' , 411 : 'LENGTH REQUIRED' , 412 : 'PRECONDITION FAILED' , 413 : 'REQUEST ENTITY TOO LARGE' , 414 : 'REQUEST-URI TOO LONG' , 415 : 'UNSUPPORTED MEDIA TYPE' , 416 : 'REQUESTED RANGE NOT SATISFIABLE' , 417 : 'EXPECTATION FAILED' , 418 : "I'M A TEAPOT" , 422 : 'UNPROCESSABLE ENTITY' , 423 : 'LOCKED' , 424 : 'FAILED DEPENDENCY' , 426 : 'UPGRADE REQUIRED' , 428 : 'PRECONDITION REQUIRED' , 429 : 'TOO MANY REQUESTS' , 431 : 'REQUEST HEADER FIELDS TOO LARGE' , 500 : 'INTERNAL SERVER ERROR' , 501 : 'NOT IMPLEMENTED' , 502 : 'BAD GATEWAY' , 503 : 'SERVICE UNAVAILABLE' , 504 : 'GATEWAY TIMEOUT' , 505 : 'HTTP VERSION NOT SUPPORTED' , 506 : 'VARIANT ALSO NEGOTIATES' , 507 : 'INSUFFICIENT STORAGE' , 508 : 'LOOP DETECTED' , 510 : 'NOT EXTENDED' , 511 : 'NETWORK AUTHENTICATION REQUIRED' , }
class BadHeaderError ( ValueError ) :
pass
class HttpResponseBase ( six . Iterator ) :
status_code = 200
reason_phrase = None
def __init__ ( self , content_type = None , status = None , reason = None ) :
self . _headers = { }
self . _charset = settings . DEFAULT_CHARSET
self . _closable_objects = [ ]
self . _handler_class = None
if not content_type :
content_type = "%s; charset=%s" % ( settings . DEFAULT_CONTENT_TYPE , self . _charset )
self . cookies = SimpleCookie ( )
if status is not None :
self . status_code = status
if reason is not None :
self . reason_phrase = reason
elif self . reason_phrase is None :
self . reason_phrase = REASON_PHRASES . get ( self . status_code , 'UNKNOWN STATUS CODE' )
self [ 'Content-Type' ] = content_type
def serialize_headers ( self ) :
def to_bytes ( val , encoding ) :
return val if isinstance ( val , bytes ) else val . encode ( encoding )
headers = [ ( b': ' . join ( [ to_bytes ( key , 'ascii' ) , to_bytes ( value , 'latin-1' ) ] ) ) for key , value in self . _headers . values ( ) ]
return b'\r\n' . join ( headers )
if six . PY3 :
__bytes__ = serialize_headers
else :
__str__ = serialize_headers
def _convert_to_charset ( self , value , charset , mime_encode = False ) :
if not isinstance ( value , ( bytes , six . text_type ) ) :
value = str ( value )
try :
if six . PY3 :
if isinstance ( value , str ) :
value . encode ( charset )
else :
value = value . decode ( charset )
else :
if isinstance ( value , str ) :
value . decode ( charset )
else :
value = value . encode ( charset )
except UnicodeError as e :
if mime_encode :
value = str ( Header ( value , 'utf-8' , maxlinelen = sys . maxsize ) . encode ( ) )
else :
e . reason += ', HTTP response headers must be in %s format' % charset
raise
if str ( '\n' ) in value or str ( '\r' ) in value :
raise BadHeaderError ( "Header values can't contain newlines (got %r)" % value )
return value
def __setitem__ ( self , header , value ) :
header = self . _convert_to_charset ( header , 'ascii' )
value = self . _convert_to_charset ( value , 'latin-1' , mime_encode = True )
self . _headers [ header . lower ( ) ] = ( header , value )
def __delitem__ ( self , header ) :
try :
del self . _headers [ header . lower ( ) ]
except KeyError :
pass
def __getitem__ ( self , header ) :
return self . _headers [ header . lower ( ) ] [ 1 ]
def __getstate__ ( self ) :
state = self . __dict__ . copy ( )
state [ 'cookies' ] = str ( state [ 'cookies' ] )
return state
def __setstate__ ( self , state ) :
self . __dict__ . update ( state )
self . cookies = SimpleCookie ( self . cookies )
def has_header ( self , header ) :
return header . lower ( ) in self . _headers
__contains__ = has_header
def items ( self ) :
return self . _headers . values ( )
def get ( self , header , alternate = None ) :
return self . _headers . get ( header . lower ( ) , ( None , alternate ) ) [ 1 ]
def set_cookie ( self , key , value = '' , max_age = None , expires = None , path = '/' , domain = None , secure = False , httponly = False ) :
self . cookies [ key ] = value
if expires is not None :
if isinstance ( expires , datetime . datetime ) :
if timezone . is_aware ( expires ) :
expires = timezone . make_naive ( expires , timezone . utc )
delta = expires - expires . utcnow ( )
delta = delta + datetime . timedelta ( seconds = 1 )
expires = None
max_age = max ( 0 , delta . days * 86400 + delta . seconds )
else :
self . cookies [ key ] [ 'expires' ] = expires
if max_age is not None :
self . cookies [ key ] [ 'max-age' ] = max_age
if not expires :
self . cookies [ key ] [ 'expires' ] = cookie_date ( time . time ( ) + max_age )
if path is not None :
self . cookies [ key ] [ 'path' ] = path
if domain is not None :
self . cookies [ key ] [ 'domain' ] = domain
if secure :
self . cookies [ key ] [ 'secure' ] = True
if httponly :
self . cookies [ key ] [ 'httponly' ] = True
def set_signed_cookie ( self , key , value , salt = '' , ** kwargs ) :
value = signing . get_cookie_signer ( salt = key + salt ) . sign ( value )
return self . set_cookie ( key , value , ** kwargs )
def delete_cookie ( self , key , path = '/' , domain = None ) :
self . set_cookie ( key , max_age = 0 , path = path , domain = domain , expires = 'Thu, 01-Jan-1970 00:00:00 GMT' )
def make_bytes ( self , value ) :
if self . has_header ( 'Content-Encoding' ) :
return bytes ( value )
if isinstance ( value , bytes ) :
return bytes ( value )
if isinstance ( value , six . text_type ) :
return bytes ( value . encode ( self . _charset ) )
return force_bytes ( value , self . _charset )
def close ( self ) :
for closable in self . _closable_objects :
try :
closable . close ( )
except Exception :
pass
signals . request_finished . send ( sender = self . _handler_class )
def write ( self , content ) :
raise Exception ( "This %s instance is not writable" % self . __class__ . __name__ )
def flush ( self ) :
pass
def tell ( self ) :
raise Exception ( "This %s instance cannot tell its position" % self . __class__ . __name__ )
class HttpResponse ( HttpResponseBase ) :
streaming = False
def __init__ ( self , content = b'' , * args , ** kwargs ) :
super ( HttpResponse , self ) . __init__ ( * args , ** kwargs )
self . content = content
def serialize ( self ) :
return self . serialize_headers ( ) + b'\r\n\r\n' + self . content
if six . PY3 :
__bytes__ = serialize
else :
__str__ = serialize
@ property
def content ( self ) :
return b'' . join ( self . _container )
@ content . setter
def content ( self , value ) :
if hasattr ( value , '__iter__' ) and not isinstance ( value , ( bytes , six . string_types ) ) :
if hasattr ( value , 'close' ) :
self . _closable_objects . append ( value )
value = b'' . join ( self . make_bytes ( chunk ) for chunk in value )
else :
value = self . make_bytes ( value )
self . _container = [ value ]
def __iter__ ( self ) :
return iter ( self . _container )
def write ( self , content ) :
self . _container . append ( self . make_bytes ( content ) )
def tell ( self ) :
return len ( self . content )
class StreamingHttpResponse ( HttpResponseBase ) :
streaming = True
def __init__ ( self , streaming_content = ( ) , * args , ** kwargs ) :
super ( StreamingHttpResponse , self ) . __init__ ( * args , ** kwargs )
self . streaming_content = streaming_content
@ property
def content ( self ) :
raise AttributeError ( "This %s instance has no `content` attribute. " "Use `streaming_content` instead." % self . __class__ . __name__ )
@ property
def streaming_content ( self ) :
return map ( self . make_bytes , self . _iterator )
@ streaming_content . setter
def streaming_content ( self , value ) :
self . _iterator = iter ( value )
if hasattr ( value , 'close' ) :
self . _closable_objects . append ( value )
def __iter__ ( self ) :
return self . streaming_content
class HttpResponseRedirectBase ( HttpResponse ) :
allowed_schemes = [ 'http' , 'https' , 'ftp' ]
def __init__ ( self , redirect_to , * args , ** kwargs ) :
parsed = urlparse ( force_text ( redirect_to ) )
if parsed . scheme and parsed . scheme not in self . allowed_schemes :
raise DisallowedRedirect ( "Unsafe redirect to URL with protocol '%s'" % parsed . scheme )
super ( HttpResponseRedirectBase , self ) . __init__ ( * args , ** kwargs )
self [ 'Location' ] = iri_to_uri ( redirect_to )
url = property ( lambda self : self [ 'Location' ] )
class HttpResponseRedirect ( HttpResponseRedirectBase ) :
status_code = 302
class HttpResponsePermanentRedirect ( HttpResponseRedirectBase ) :
status_code = 301
class HttpResponseNotModified ( HttpResponse ) :
status_code = 304
def __init__ ( self , * args , ** kwargs ) :
super ( HttpResponseNotModified , self ) . __init__ ( * args , ** kwargs )
del self [ 'content-type' ]
@ HttpResponse . content . setter
def content ( self , value ) :
if value :
raise AttributeError ( "You cannot set content to a 304 (Not Modified) response" )
self . _container = [ ]
class HttpResponseBadRequest ( HttpResponse ) :
status_code = 400
class HttpResponseNotFound ( HttpResponse ) :
status_code = 404
class HttpResponseForbidden ( HttpResponse ) :
status_code = 403
class HttpResponseNotAllowed ( HttpResponse ) :
status_code = 405
def __init__ ( self , permitted_methods , * args , ** kwargs ) :
super ( HttpResponseNotAllowed , self ) . __init__ ( * args , ** kwargs )
self [ 'Allow' ] = ', ' . join ( permitted_methods )
class HttpResponseGone ( HttpResponse ) :
status_code = 410
class HttpResponseServerError ( HttpResponse ) :
status_code = 500
class Http404 ( Exception ) :
pass
class JsonResponse ( HttpResponse ) :
def __init__ ( self , data , encoder = DjangoJSONEncoder , safe = True , ** kwargs ) :
if safe and not isinstance ( data , dict ) :
raise TypeError ( 'In order to allow non-dict objects to be ' 'serialized set the safe parameter to False' )
kwargs . setdefault ( 'content_type' , 'application/json' )
data = json . dumps ( data , cls = encoder )
super ( JsonResponse , self ) . __init__ ( content = data , ** kwargs )
def fix_location_header ( request , response ) :
if 'Location' in response and request . get_host ( ) :
response [ 'Location' ] = request . build_absolute_uri ( response [ 'Location' ] )
return response
def conditional_content_removal ( request , response ) :
if 100 <= response . status_code < 200 or response . status_code in ( 204 , 304 ) :
if response . streaming :
response . streaming_content = [ ]
else :
response . content = b''
response [ 'Content-Length' ] = '0'
if request . method == 'HEAD' :
if response . streaming :
response . streaming_content = [ ]
else :
response . content = b''
return response
from django . template . base import ( ALLOWED_VARIABLE_CHARS , BLOCK_TAG_END , BLOCK_TAG_START , COMMENT_TAG_END , COMMENT_TAG_START , FILTER_ARGUMENT_SEPARATOR , FILTER_SEPARATOR , SINGLE_BRACE_END , SINGLE_BRACE_START , TOKEN_BLOCK , TOKEN_COMMENT , TOKEN_TEXT , TOKEN_VAR , TRANSLATOR_COMMENT_MARK , UNKNOWN_SOURCE , VARIABLE_ATTRIBUTE_SEPARATOR , VARIABLE_TAG_END , VARIABLE_TAG_START , filter_re , tag_re )
from django . template . base import ( ContextPopException , InvalidTemplateLibrary , TemplateDoesNotExist , TemplateEncodingError , TemplateSyntaxError , VariableDoesNotExist )
from django . template . base import ( Context , FilterExpression , Lexer , Node , NodeList , Parser , RequestContext , Origin , StringOrigin , Template , TextNode , Token , TokenParser , Variable , VariableNode , constant_string , filter_raw_string )
from django . template . base import ( compile_string , resolve_variable , unescape_string_literal , generic_tag_compiler )
from django . template . base import ( Library , add_to_builtins , builtins , get_library , get_templatetags_modules , get_text_list , import_library , libraries )
__all__ = ( 'Template' , 'Context' , 'RequestContext' , 'compile_string' )
from __future__ import unicode_literals
import re
from functools import partial
from importlib import import_module
from inspect import getargspec , getcallargs
import warnings
from django . apps import apps
from django . conf import settings
from django . template . context import ( BaseContext , Context , RequestContext , ContextPopException )
from django . utils . deprecation import RemovedInDjango20Warning
from django . utils . itercompat import is_iterable
from django . utils . text import ( smart_split , unescape_string_literal , get_text_list )
from django . utils . encoding import force_str , force_text
from django . utils . translation import ugettext_lazy , pgettext_lazy
from django . utils . safestring import ( SafeData , EscapeData , mark_safe , mark_for_escaping )
from django . utils . formats import localize
from django . utils . html import escape
from django . utils . module_loading import module_has_submodule
from django . utils import six
from django . utils . timezone import template_localtime
from django . utils . encoding import python_2_unicode_compatible
TOKEN_TEXT = 0
TOKEN_VAR = 1
TOKEN_BLOCK = 2
TOKEN_COMMENT = 3
TOKEN_MAPPING = { TOKEN_TEXT : 'Text' , TOKEN_VAR : 'Var' , TOKEN_BLOCK : 'Block' , TOKEN_COMMENT : 'Comment' , }
FILTER_SEPARATOR = '|'
FILTER_ARGUMENT_SEPARATOR = ':'
VARIABLE_ATTRIBUTE_SEPARATOR = '.'
BLOCK_TAG_START = '{%'
BLOCK_TAG_END = '%}'
VARIABLE_TAG_START = '{{'
VARIABLE_TAG_END = '}}'
COMMENT_TAG_START = '{#'
COMMENT_TAG_END = '#}'
TRANSLATOR_COMMENT_MARK = 'Translators'
SINGLE_BRACE_START = '{'
SINGLE_BRACE_END = '}'
ALLOWED_VARIABLE_CHARS = ( 'abcdefghijklmnopqrstuvwxyz' 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.' )
UNKNOWN_SOURCE = ''
tag_re = ( re . compile ( '(%s.*?%s|%s.*?%s|%s.*?%s)' % ( re . escape ( BLOCK_TAG_START ) , re . escape ( BLOCK_TAG_END ) , re . escape ( VARIABLE_TAG_START ) , re . escape ( VARIABLE_TAG_END ) , re . escape ( COMMENT_TAG_START ) , re . escape ( COMMENT_TAG_END ) ) ) )
libraries = { }
builtins = [ ]
invalid_var_format_string = None
class TemplateSyntaxError ( Exception ) :
pass
class TemplateDoesNotExist ( Exception ) :
pass
class TemplateEncodingError ( Exception ) :
pass
@ python_2_unicode_compatible
class VariableDoesNotExist ( Exception ) :
def __init__ ( self , msg , params = ( ) ) :
self . msg = msg
self . params = params
def __str__ ( self ) :
return self . msg % tuple ( force_text ( p , errors = 'replace' ) for p in self . params )
class InvalidTemplateLibrary ( Exception ) :
pass
class Origin ( object ) :
def __init__ ( self , name ) :
self . name = name
def reload ( self ) :
raise NotImplementedError ( 'subclasses of Origin must provide a reload() method' )
def __str__ ( self ) :
return self . name
class StringOrigin ( Origin ) :
def __init__ ( self , source ) :
super ( StringOrigin , self ) . __init__ ( UNKNOWN_SOURCE )
self . source = source
def reload ( self ) :
return self . source
class Template ( object ) :
def __init__ ( self , template_string , origin = None , name = None ) :
try :
template_string = force_text ( template_string )
except UnicodeDecodeError :
raise TemplateEncodingError ( "Templates can only be constructed " "from unicode or UTF-8 strings." )
if settings . TEMPLATE_DEBUG and origin is None :
origin = StringOrigin ( template_string )
self . nodelist = compile_string ( template_string , origin )
self . name = name
self . origin = origin
def __iter__ ( self ) :
for node in self . nodelist :
for subnode in node :
yield subnode
def _render ( self , context ) :
return self . nodelist . render ( context )
def render ( self , context ) :
context . render_context . push ( )
try :
return self . _render ( context )
finally :
context . render_context . pop ( )
def compile_string ( template_string , origin ) :
if settings . TEMPLATE_DEBUG :
from django . template . debug import DebugLexer , DebugParser
lexer_class , parser_class = DebugLexer , DebugParser
else :
lexer_class , parser_class = Lexer , Parser
lexer = lexer_class ( template_string , origin )
parser = parser_class ( lexer . tokenize ( ) )
return parser . parse ( )
class Token ( object ) :
def __init__ ( self , token_type , contents ) :
self . token_type , self . contents = token_type , contents
self . lineno = None
def __str__ ( self ) :
token_name = TOKEN_MAPPING [ self . token_type ]
return ( '<%s token: "%s...">' % ( token_name , self . contents [ : 20 ] . replace ( '\n' , '' ) ) )
def split_contents ( self ) :
split = [ ]
bits = iter ( smart_split ( self . contents ) )
for bit in bits :
if bit . startswith ( '_("' ) or bit . startswith ( "_('" ) :
sentinal = bit [ 2 ] + ')'
trans_bit = [ bit ]
while not bit . endswith ( sentinal ) :
bit = next ( bits )
trans_bit . append ( bit )
bit = ' ' . join ( trans_bit )
split . append ( bit )
return split
class Lexer ( object ) :
def __init__ ( self , template_string , origin ) :
self . template_string = template_string
self . origin = origin
self . lineno = 1
self . verbatim = False
def tokenize ( self ) :
in_tag = False
result = [ ]
for bit in tag_re . split ( self . template_string ) :
if bit :
result . append ( self . create_token ( bit , in_tag ) )
in_tag = not in_tag
return result
def create_token ( self , token_string , in_tag ) :
if in_tag and token_string . startswith ( BLOCK_TAG_START ) :
block_content = token_string [ 2 : - 2 ] . strip ( )
if self . verbatim and block_content == self . verbatim :
self . verbatim = False
if in_tag and not self . verbatim :
if token_string . startswith ( VARIABLE_TAG_START ) :
token = Token ( TOKEN_VAR , token_string [ 2 : - 2 ] . strip ( ) )
elif token_string . startswith ( BLOCK_TAG_START ) :
if block_content [ : 9 ] in ( 'verbatim' , 'verbatim ' ) :
self . verbatim = 'end%s' % block_content
token = Token ( TOKEN_BLOCK , block_content )
elif token_string . startswith ( COMMENT_TAG_START ) :
content = ''
if token_string . find ( TRANSLATOR_COMMENT_MARK ) :
content = token_string [ 2 : - 2 ] . strip ( )
token = Token ( TOKEN_COMMENT , content )
else :
token = Token ( TOKEN_TEXT , token_string )
token . lineno = self . lineno
self . lineno += token_string . count ( '\n' )
return token
class Parser ( object ) :
def __init__ ( self , tokens ) :
self . tokens = tokens
self . tags = { }
self . filters = { }
for lib in builtins :
self . add_library ( lib )
def parse ( self , parse_until = None ) :
if parse_until is None :
parse_until = [ ]
nodelist = self . create_nodelist ( )
while self . tokens :
token = self . next_token ( )
if token . token_type == 0 :
self . extend_nodelist ( nodelist , TextNode ( token . contents ) , token )
elif token . token_type == 1 :
if not token . contents :
self . empty_variable ( token )
try :
filter_expression = self . compile_filter ( token . contents )
except TemplateSyntaxError as e :
if not self . compile_filter_error ( token , e ) :
raise
var_node = self . create_variable_node ( filter_expression )
self . extend_nodelist ( nodelist , var_node , token )
elif token . token_type == 2 :
try :
command = token . contents . split ( ) [ 0 ]
except IndexError :
self . empty_block_tag ( token )
if command in parse_until :
self . prepend_token ( token )
return nodelist
self . enter_command ( command , token )
try :
compile_func = self . tags [ command ]
except KeyError :
self . invalid_block_tag ( token , command , parse_until )
try :
compiled_result = compile_func ( self , token )
except TemplateSyntaxError as e :
if not self . compile_function_error ( token , e ) :
raise
self . extend_nodelist ( nodelist , compiled_result , token )
self . exit_command ( )
if parse_until :
self . unclosed_block_tag ( parse_until )
return nodelist
def skip_past ( self , endtag ) :
while self . tokens :
token = self . next_token ( )
if token . token_type == TOKEN_BLOCK and token . contents == endtag :
return
self . unclosed_block_tag ( [ endtag ] )
def create_variable_node ( self , filter_expression ) :
return VariableNode ( filter_expression )
def create_nodelist ( self ) :
return NodeList ( )
def extend_nodelist ( self , nodelist , node , token ) :
if node . must_be_first and nodelist :
try :
if nodelist . contains_nontext :
raise AttributeError
except AttributeError :
raise TemplateSyntaxError ( "%r must be the first tag " "in the template." % node )
if isinstance ( nodelist , NodeList ) and not isinstance ( node , TextNode ) :
nodelist . contains_nontext = True
nodelist . append ( node )
def enter_command ( self , command , token ) :
pass
def exit_command ( self ) :
pass
def error ( self , token , msg ) :
return TemplateSyntaxError ( msg )
def empty_variable ( self , token ) :
raise self . error ( token , "Empty variable tag" )
def empty_block_tag ( self , token ) :
raise self . error ( token , "Empty block tag" )
def invalid_block_tag ( self , token , command , parse_until = None ) :
if parse_until :
raise self . error ( token , "Invalid block tag: '%s', expected %s" % ( command , get_text_list ( [ "'%s'" % p for p in parse_until ] ) ) )
raise self . error ( token , "Invalid block tag: '%s'" % command )
def unclosed_block_tag ( self , parse_until ) :
raise self . error ( None , "Unclosed tags: %s " % ', ' . join ( parse_until ) )
def compile_filter_error ( self , token , e ) :
pass
def compile_function_error ( self , token , e ) :
pass
def next_token ( self ) :
return self . tokens . pop ( 0 )
def prepend_token ( self , token ) :
self . tokens . insert ( 0 , token )
def delete_first_token ( self ) :
del self . tokens [ 0 ]
def add_library ( self , lib ) :
self . tags . update ( lib . tags )
self . filters . update ( lib . filters )
def compile_filter ( self , token ) :
return FilterExpression ( token , self )
def find_filter ( self , filter_name ) :
if filter_name in self . filters :
return self . filters [ filter_name ]
else :
raise TemplateSyntaxError ( "Invalid filter: '%s'" % filter_name )
class TokenParser ( object ) :
def __init__ ( self , subject ) :
self . subject = subject
self . pointer = 0
self . backout = [ ]
self . tagname = self . tag ( )
def top ( self ) :
raise NotImplementedError ( 'subclasses of Tokenparser must provide a top() method' )
def more ( self ) :
return self . pointer < len ( self . subject )
def back ( self ) :
if not len ( self . backout ) :
raise TemplateSyntaxError ( "back called without some previous " "parsing" )
self . pointer = self . backout . pop ( )
def tag ( self ) :
subject = self . subject
i = self . pointer
if i >= len ( subject ) :
raise TemplateSyntaxError ( "expected another tag, found " "end of string: %s" % subject )
p = i
while i < len ( subject ) and subject [ i ] not in ( ' ' , '\t' ) :
i += 1
s = subject [ p : i ]
while i < len ( subject ) and subject [ i ] in ( ' ' , '\t' ) :
i += 1
self . backout . append ( self . pointer )
self . pointer = i
return s
def value ( self ) :
subject = self . subject
i = self . pointer
def next_space_index ( subject , i ) :
while i < len ( subject ) and subject [ i ] not in ( ' ' , '\t' ) :
if subject [ i ] in ( '"' , "'" ) :
c = subject [ i ]
i += 1
while i < len ( subject ) and subject [ i ] != c :
i += 1
if i >= len ( subject ) :
raise TemplateSyntaxError ( "Searching for value. " "Unexpected end of string in column %d: %s" % ( i , subject ) )
i += 1
return i
if i >= len ( subject ) :
raise TemplateSyntaxError ( "Searching for value. Expected another " "value but found end of string: %s" % subject )
if subject [ i ] in ( '"' , "'" ) :
p = i
i += 1
while i < len ( subject ) and subject [ i ] != subject [ p ] :
i += 1
if i >= len ( subject ) :
raise TemplateSyntaxError ( "Searching for value. Unexpected " "end of string in column %d: %s" % ( i , subject ) )
i += 1
i = next_space_index ( subject , i )
res = subject [ p : i ]
while i < len ( subject ) and subject [ i ] in ( ' ' , '\t' ) :
i += 1
self . backout . append ( self . pointer )
self . pointer = i
return res
else :
p = i
i = next_space_index ( subject , i )
s = subject [ p : i ]
while i < len ( subject ) and subject [ i ] in ( ' ' , '\t' ) :
i += 1
self . backout . append ( self . pointer )
self . pointer = i
return s
constant_string = r""" (?:%(i18n_open)s%(strdq)s%(i18n_close)s| %(i18n_open)s%(strsq)s%(i18n_close)s| %(strdq)s| %(strsq)s) """ % { 'strdq' : r'"[^"\\]*(?:\\.[^"\\]*)*"' , 'strsq' : r"'[^'\\]*(?:\\.[^'\\]*)*'" , 'i18n_open' : re . escape ( "_(" ) , 'i18n_close' : re . escape ( ")" ) , }
constant_string = constant_string . replace ( "\n" , "" )
filter_raw_string = r""" ^(?P%(constant)s)| ^(?P[%(var_chars)s]+|%(num)s)| (?:\s*%(filter_sep)s\s* (?P\w+) (?:%(arg_sep)s (?: (?P%(constant)s)| (?P[%(var_chars)s]+|%(num)s) ) )? )""" % { 'constant' : constant_string , 'num' : r'[-+\.]?\d[\d\.e]*' , 'var_chars' : "\w\." , 'filter_sep' : re . escape ( FILTER_SEPARATOR ) , 'arg_sep' : re . escape ( FILTER_ARGUMENT_SEPARATOR ) , }
filter_re = re . compile ( filter_raw_string , re . UNICODE | re . VERBOSE )
class FilterExpression ( object ) :
def __init__ ( self , token , parser ) :
self . token = token
matches = filter_re . finditer ( token )
var_obj = None
filters = [ ]
upto = 0
for match in matches :
start = match . start ( )
if upto != start :
raise TemplateSyntaxError ( "Could not parse some characters: " "%s|%s|%s" % ( token [ : upto ] , token [ upto : start ] , token [ start : ] ) )
if var_obj is None :
var , constant = match . group ( "var" , "constant" )
if constant :
try :
var_obj = Variable ( constant ) . resolve ( { } )
except VariableDoesNotExist :
var_obj = None
elif var is None :
raise TemplateSyntaxError ( "Could not find variable at " "start of %s." % token )
else :
var_obj = Variable ( var )
else :
filter_name = match . group ( "filter_name" )
args = [ ]
constant_arg , var_arg = match . group ( "constant_arg" , "var_arg" )
if constant_arg :
args . append ( ( False , Variable ( constant_arg ) . resolve ( { } ) ) )
elif var_arg :
args . append ( ( True , Variable ( var_arg ) ) )
filter_func = parser . find_filter ( filter_name )
self . args_check ( filter_name , filter_func , args )
filters . append ( ( filter_func , args ) )
upto = match . end ( )
if upto != len ( token ) :
raise TemplateSyntaxError ( "Could not parse the remainder: '%s' " "from '%s'" % ( token [ upto : ] , token ) )
self . filters = filters
self . var = var_obj
def resolve ( self , context , ignore_failures = False ) :
if isinstance ( self . var , Variable ) :
try :
obj = self . var . resolve ( context )
except VariableDoesNotExist :
if ignore_failures :
obj = None
else :
if settings . TEMPLATE_STRING_IF_INVALID :
global invalid_var_format_string
if invalid_var_format_string is None :
invalid_var_format_string = '%s' in settings . TEMPLATE_STRING_IF_INVALID
if invalid_var_format_string :
return settings . TEMPLATE_STRING_IF_INVALID % self . var
return settings . TEMPLATE_STRING_IF_INVALID
else :
obj = settings . TEMPLATE_STRING_IF_INVALID
else :
obj = self . var
for func , args in self . filters :
arg_vals = [ ]
for lookup , arg in args :
if not lookup :
arg_vals . append ( mark_safe ( arg ) )
else :
arg_vals . append ( arg . resolve ( context ) )
if getattr ( func , 'expects_localtime' , False ) :
obj = template_localtime ( obj , context . use_tz )
if getattr ( func , 'needs_autoescape' , False ) :
new_obj = func ( obj , autoescape = context . autoescape , * arg_vals )
else :
new_obj = func ( obj , * arg_vals )
if getattr ( func , 'is_safe' , False ) and isinstance ( obj , SafeData ) :
obj = mark_safe ( new_obj )
elif isinstance ( obj , EscapeData ) :
obj = mark_for_escaping ( new_obj )
else :
obj = new_obj
return obj
def args_check ( name , func , provided ) :
provided = list ( provided )
plen = len ( provided ) + 1
func = getattr ( func , '_decorated_function' , func )
args , varargs , varkw , defaults = getargspec ( func )
alen = len ( args )
dlen = len ( defaults or [ ] )
if plen < ( alen - dlen ) or plen > alen :
raise TemplateSyntaxError ( "%s requires %d arguments, %d provided" % ( name , alen - dlen , plen ) )
return True
args_check = staticmethod ( args_check )
def __str__ ( self ) :
return self . token
def resolve_variable ( path , context ) :
warnings . warn ( "resolve_variable() is deprecated. Use django.template." "Variable(path).resolve(context) instead" , RemovedInDjango20Warning , stacklevel = 2 )
return Variable ( path ) . resolve ( context )
class Variable ( object ) :
def __init__ ( self , var ) :
self . var = var
self . literal = None
self . lookups = None
self . translate = False
self . message_context = None
if not isinstance ( var , six . string_types ) :
raise TypeError ( "Variable must be a string or number, got %s" % type ( var ) )
try :
self . literal = float ( var )
if '.' not in var and 'e' not in var . lower ( ) :
self . literal = int ( self . literal )
if var . endswith ( '.' ) :
raise ValueError
except ValueError :
if var . startswith ( '_(' ) and var . endswith ( ')' ) :
self . translate = True
var = var [ 2 : - 1 ]
try :
self . literal = mark_safe ( unescape_string_literal ( var ) )
except ValueError :
if var . find ( VARIABLE_ATTRIBUTE_SEPARATOR + '_' ) > - 1 or var [ 0 ] == '_' :
raise TemplateSyntaxError ( "Variables and attributes may " "not begin with underscores: '%s'" % var )
self . lookups = tuple ( var . split ( VARIABLE_ATTRIBUTE_SEPARATOR ) )
def resolve ( self , context ) :
if self . lookups is not None :
value = self . _resolve_lookup ( context )
else :
value = self . literal
if self . translate :
if self . message_context :
return pgettext_lazy ( self . message_context , value )
else :
return ugettext_lazy ( value )
return value
def __repr__ ( self ) :
return "<%s: %r>" % ( self . __class__ . __name__ , self . var )
def __str__ ( self ) :
return self . var
def _resolve_lookup ( self , context ) :
current = context
try :
for bit in self . lookups :
try :
current = current [ bit ]
except ( TypeError , AttributeError , KeyError , ValueError ) :
try :
if isinstance ( current , BaseContext ) and getattr ( type ( current ) , bit ) :
raise AttributeError
current = getattr ( current , bit )
except ( TypeError , AttributeError ) :
try :
current = current [ int ( bit ) ]
except ( IndexError , ValueError , KeyError , TypeError ) :
raise VariableDoesNotExist ( "Failed lookup for key " "[%s] in %r" , ( bit , current ) )
if callable ( current ) :
if getattr ( current , 'do_not_call_in_templates' , False ) :
pass
elif getattr ( current , 'alters_data' , False ) :
current = settings . TEMPLATE_STRING_IF_INVALID
else :
try :
current = current ( )
except TypeError :
try :
getcallargs ( current )
except TypeError :
current = settings . TEMPLATE_STRING_IF_INVALID
else :
raise
except Exception as e :
if getattr ( e , 'silent_variable_failure' , False ) :
current = settings . TEMPLATE_STRING_IF_INVALID
else :
raise
return current
class Node ( object ) :
must_be_first = False
child_nodelists = ( 'nodelist' , )
def render ( self , context ) :
pass
def __iter__ ( self ) :
yield self
def get_nodes_by_type ( self , nodetype ) :
nodes = [ ]
if isinstance ( self , nodetype ) :
nodes . append ( self )
for attr in self . child_nodelists :
nodelist = getattr ( self , attr , None )
if nodelist :
nodes . extend ( nodelist . get_nodes_by_type ( nodetype ) )
return nodes
class NodeList ( list ) :
contains_nontext = False
def render ( self , context ) :
bits = [ ]
for node in self :
if isinstance ( node , Node ) :
bit = self . render_node ( node , context )
else :
bit = node
bits . append ( force_text ( bit ) )
return mark_safe ( '' . join ( bits ) )
def get_nodes_by_type ( self , nodetype ) :
nodes = [ ]
for node in self :
nodes . extend ( node . get_nodes_by_type ( nodetype ) )
return nodes
def render_node ( self , node , context ) :
return node . render ( context )
class TextNode ( Node ) :
def __init__ ( self , s ) :
self . s = s
def __repr__ ( self ) :
return force_str ( "" % self . s [ : 25 ] , 'ascii' , errors = 'replace' )
def render ( self , context ) :
return self . s
def render_value_in_context ( value , context ) :
value = template_localtime ( value , use_tz = context . use_tz )
value = localize ( value , use_l10n = context . use_l10n )
value = force_text ( value )
if ( ( context . autoescape and not isinstance ( value , SafeData ) ) or isinstance ( value , EscapeData ) ) :
return escape ( value )
else :
return value
class VariableNode ( Node ) :
def __init__ ( self , filter_expression ) :
self . filter_expression = filter_expression
def __repr__ ( self ) :
return "" % self . filter_expression
def render ( self , context ) :
try :
output = self . filter_expression . resolve ( context )
except UnicodeDecodeError :
return ''
return render_value_in_context ( output , context )
kwarg_re = re . compile ( r"(?:(\w+)=)?(.+)" )
def token_kwargs ( bits , parser , support_legacy = False ) :
if not bits :
return { }
match = kwarg_re . match ( bits [ 0 ] )
kwarg_format = match and match . group ( 1 )
if not kwarg_format :
if not support_legacy :
return { }
if len ( bits ) < 3 or bits [ 1 ] != 'as' :
return { }
kwargs = { }
while bits :
if kwarg_format :
match = kwarg_re . match ( bits [ 0 ] )
if not match or not match . group ( 1 ) :
return kwargs
key , value = match . groups ( )
del bits [ : 1 ]
else :
if len ( bits ) < 3 or bits [ 1 ] != 'as' :
return kwargs
key , value = bits [ 2 ] , bits [ 0 ]
del bits [ : 3 ]
kwargs [ key ] = parser . compile_filter ( value )
if bits and not kwarg_format :
if bits [ 0 ] != 'and' :
return kwargs
del bits [ : 1 ]
return kwargs
def parse_bits ( parser , bits , params , varargs , varkw , defaults , takes_context , name ) :
if takes_context :
if params [ 0 ] == 'context' :
params = params [ 1 : ]
else :
raise TemplateSyntaxError ( "'%s' is decorated with takes_context=True so it must " "have a first argument of 'context'" % name )
args = [ ]
kwargs = { }
unhandled_params = list ( params )
for bit in bits :
kwarg = token_kwargs ( [ bit ] , parser )
if kwarg :
param , value = list ( six . iteritems ( kwarg ) ) [ 0 ]
if param not in params and varkw is None :
raise TemplateSyntaxError ( "'%s' received unexpected keyword argument '%s'" % ( name , param ) )
elif param in kwargs :
raise TemplateSyntaxError ( "'%s' received multiple values for keyword argument '%s'" % ( name , param ) )
else :
kwargs [ str ( param ) ] = value
if param in unhandled_params :
unhandled_params . remove ( param )
else :
if kwargs :
raise TemplateSyntaxError ( "'%s' received some positional argument(s) after some " "keyword argument(s)" % name )
else :
args . append ( parser . compile_filter ( bit ) )
try :
unhandled_params . pop ( 0 )
except IndexError :
if varargs is None :
raise TemplateSyntaxError ( "'%s' received too many positional arguments" % name )
if defaults is not None :
unhandled_params = unhandled_params [ : - len ( defaults ) ]
if unhandled_params :
raise TemplateSyntaxError ( "'%s' did not receive value(s) for the argument(s): %s" % ( name , ", " . join ( "'%s'" % p for p in unhandled_params ) ) )
return args , kwargs
def generic_tag_compiler ( parser , token , params , varargs , varkw , defaults , name , takes_context , node_class ) :
bits = token . split_contents ( ) [ 1 : ]
args , kwargs = parse_bits ( parser , bits , params , varargs , varkw , defaults , takes_context , name )
return node_class ( takes_context , args , kwargs )
class TagHelperNode ( Node ) :
def __init__ ( self , takes_context , args , kwargs ) :
self . takes_context = takes_context
self . args = args
self . kwargs = kwargs
def get_resolved_arguments ( self , context ) :
resolved_args = [ var . resolve ( context ) for var in self . args ]
if self . takes_context :
resolved_args = [ context ] + resolved_args
resolved_kwargs = dict ( ( k , v . resolve ( context ) ) for k , v in self . kwargs . items ( ) )
return resolved_args , resolved_kwargs
class Library ( object ) :
def __init__ ( self ) :
self . filters = { }
self . tags = { }
def tag ( self , name = None , compile_function = None ) :
if name is None and compile_function is None :
return self . tag_function
elif name is not None and compile_function is None :
if callable ( name ) :
return self . tag_function ( name )
else :
def dec ( func ) :
return self . tag ( name , func )
return dec
elif name is not None and compile_function is not None :
self . tags [ name ] = compile_function
return compile_function
else :
raise InvalidTemplateLibrary ( "Unsupported arguments to " "Library.tag: (%r, %r)" , ( name , compile_function ) )
def tag_function ( self , func ) :
self . tags [ getattr ( func , "_decorated_function" , func ) . __name__ ] = func
return func
def filter ( self , name = None , filter_func = None , ** flags ) :
if name is None and filter_func is None :
def dec ( func ) :
return self . filter_function ( func , ** flags )
return dec
elif name is not None and filter_func is None :
if callable ( name ) :
return self . filter_function ( name , ** flags )
else :
def dec ( func ) :
return self . filter ( name , func , ** flags )
return dec
elif name is not None and filter_func is not None :
self . filters [ name ] = filter_func
for attr in ( 'expects_localtime' , 'is_safe' , 'needs_autoescape' ) :
if attr in flags :
value = flags [ attr ]
setattr ( filter_func , attr , value )
if hasattr ( filter_func , "_decorated_function" ) :
setattr ( filter_func . _decorated_function , attr , value )
filter_func . _filter_name = name
return filter_func
else :
raise InvalidTemplateLibrary ( "Unsupported arguments to " "Library.filter: (%r, %r)" , ( name , filter_func ) )
def filter_function ( self , func , ** flags ) :
name = getattr ( func , "_decorated_function" , func ) . __name__
return self . filter ( name , func , ** flags )
def simple_tag ( self , func = None , takes_context = None , name = None ) :
def dec ( func ) :
params , varargs , varkw , defaults = getargspec ( func )
class SimpleNode ( TagHelperNode ) :
def render ( self , context ) :
resolved_args , resolved_kwargs = self . get_resolved_arguments ( context )
return func ( * resolved_args , ** resolved_kwargs )
function_name = ( name or getattr ( func , '_decorated_function' , func ) . __name__ )
compile_func = partial ( generic_tag_compiler , params = params , varargs = varargs , varkw = varkw , defaults = defaults , name = function_name , takes_context = takes_context , node_class = SimpleNode )
compile_func . __doc__ = func . __doc__
self . tag ( function_name , compile_func )
return func
if func is None :
return dec
elif callable ( func ) :
return dec ( func )
else :
raise TemplateSyntaxError ( "Invalid arguments provided to simple_tag" )
def assignment_tag ( self , func = None , takes_context = None , name = None ) :
def dec ( func ) :
params , varargs , varkw , defaults = getargspec ( func )
class AssignmentNode ( TagHelperNode ) :
def __init__ ( self , takes_context , args , kwargs , target_var ) :
super ( AssignmentNode , self ) . __init__ ( takes_context , args , kwargs )
self . target_var = target_var
def render ( self , context ) :
resolved_args , resolved_kwargs = self . get_resolved_arguments ( context )
context [ self . target_var ] = func ( * resolved_args , ** resolved_kwargs )
return ''
function_name = ( name or getattr ( func , '_decorated_function' , func ) . __name__ )
def compile_func ( parser , token ) :
bits = token . split_contents ( ) [ 1 : ]
if len ( bits ) < 2 or bits [ - 2 ] != 'as' :
raise TemplateSyntaxError ( "'%s' tag takes at least 2 arguments and the " "second last argument must be 'as'" % function_name )
target_var = bits [ - 1 ]
bits = bits [ : - 2 ]
args , kwargs = parse_bits ( parser , bits , params , varargs , varkw , defaults , takes_context , function_name )
return AssignmentNode ( takes_context , args , kwargs , target_var )
compile_func . __doc__ = func . __doc__
self . tag ( function_name , compile_func )
return func
if func is None :
return dec
elif callable ( func ) :
return dec ( func )
else :
raise TemplateSyntaxError ( "Invalid arguments provided to assignment_tag" )
def inclusion_tag ( self , file_name , context_class = Context , takes_context = False , name = None ) :
def dec ( func ) :
params , varargs , varkw , defaults = getargspec ( func )
class InclusionNode ( TagHelperNode ) :
def render ( self , context ) :
resolved_args , resolved_kwargs = self . get_resolved_arguments ( context )
_dict = func ( * resolved_args , ** resolved_kwargs )
if not getattr ( self , 'nodelist' , False ) :
from django . template . loader import get_template , select_template
if isinstance ( file_name , Template ) :
t = file_name
elif not isinstance ( file_name , six . string_types ) and is_iterable ( file_name ) :
t = select_template ( file_name )
else :
t = get_template ( file_name )
self . nodelist = t . nodelist
new_context = context_class ( _dict , ** { 'autoescape' : context . autoescape , 'current_app' : context . current_app , 'use_l10n' : context . use_l10n , 'use_tz' : context . use_tz , } )
csrf_token = context . get ( 'csrf_token' , None )
if csrf_token is not None :
new_context [ 'csrf_token' ] = csrf_token
return self . nodelist . render ( new_context )
function_name = ( name or getattr ( func , '_decorated_function' , func ) . __name__ )
compile_func = partial ( generic_tag_compiler , params = params , varargs = varargs , varkw = varkw , defaults = defaults , name = function_name , takes_context = takes_context , node_class = InclusionNode )
compile_func . __doc__ = func . __doc__
self . tag ( function_name , compile_func )
return func
return dec
def is_library_missing ( name ) :
path , module = name . rsplit ( '.' , 1 )
try :
package = import_module ( path )
return not module_has_submodule ( package , module )
except ImportError :
return is_library_missing ( path )
def import_library ( taglib_module ) :
try :
mod = import_module ( taglib_module )
except ImportError as e :
if is_library_missing ( taglib_module ) :
return None
else :
raise InvalidTemplateLibrary ( "ImportError raised loading %s: %s" % ( taglib_module , e ) )
try :
return mod . register
except AttributeError :
raise InvalidTemplateLibrary ( "Template library %s does not have " "a variable named 'register'" % taglib_module )
templatetags_modules = [ ]
def get_templatetags_modules ( ) :
global templatetags_modules
if not templatetags_modules :
_templatetags_modules = [ ]
templatetags_modules_candidates = [ 'django.templatetags' ]
templatetags_modules_candidates += [ '%s.templatetags' % app_config . name for app_config in apps . get_app_configs ( ) ]
for templatetag_module in templatetags_modules_candidates :
try :
import_module ( templatetag_module )
_templatetags_modules . append ( templatetag_module )
except ImportError :
continue
templatetags_modules = _templatetags_modules
return templatetags_modules
def get_library ( library_name ) :
lib = libraries . get ( library_name , None )
if not lib :
templatetags_modules = get_templatetags_modules ( )
tried_modules = [ ]
for module in templatetags_modules :
taglib_module = '%s.%s' % ( module , library_name )
tried_modules . append ( taglib_module )
lib = import_library ( taglib_module )
if lib :
libraries [ library_name ] = lib
break
if not lib :
raise InvalidTemplateLibrary ( "Template library %s not found, " "tried %s" % ( library_name , ',' . join ( tried_modules ) ) )
return lib
def add_to_builtins ( module ) :
builtins . append ( import_library ( module ) )
add_to_builtins ( 'django.template.defaulttags' )
add_to_builtins ( 'django.template.defaultfilters' )
add_to_builtins ( 'django.template.loader_tags' )
from copy import copy
from django . utils . module_loading import import_string
_standard_context_processors = None
_builtin_context_processors = ( 'django.core.context_processors.csrf' , )
class ContextPopException ( Exception ) :
pass
class ContextDict ( dict ) :
def __init__ ( self , context , * args , ** kwargs ) :
super ( ContextDict , self ) . __init__ ( * args , ** kwargs )
context . dicts . append ( self )
self . context = context
def __enter__ ( self ) :
return self
def __exit__ ( self , * args , ** kwargs ) :
self . context . pop ( )
class BaseContext ( object ) :
def __init__ ( self , dict_ = None ) :
self . _reset_dicts ( dict_ )
def _reset_dicts ( self , value = None ) :
builtins = { 'True' : True , 'False' : False , 'None' : None }
self . dicts = [ builtins ]
if value is not None :
self . dicts . append ( value )
def __copy__ ( self ) :
duplicate = copy ( super ( BaseContext , self ) )
duplicate . dicts = self . dicts [ : ]
return duplicate
def __repr__ ( self ) :
return repr ( self . dicts )
def __iter__ ( self ) :
for d in reversed ( self . dicts ) :
yield d
def push ( self , * args , ** kwargs ) :
return ContextDict ( self , * args , ** kwargs )
def pop ( self ) :
if len ( self . dicts ) == 1 :
raise ContextPopException
return self . dicts . pop ( )
def __setitem__ ( self , key , value ) :
self . dicts [ - 1 ] [ key ] = value
def __getitem__ ( self , key ) :
for d in reversed ( self . dicts ) :
if key in d :
return d [ key ]
raise KeyError ( key )
def __delitem__ ( self , key ) :
del self . dicts [ - 1 ] [ key ]
def has_key ( self , key ) :
for d in self . dicts :
if key in d :
return True
return False
def __contains__ ( self , key ) :
return self . has_key ( key )
def get ( self , key , otherwise = None ) :
for d in reversed ( self . dicts ) :
if key in d :
return d [ key ]
return otherwise
def new ( self , values = None ) :
new_context = copy ( self )
new_context . _reset_dicts ( values )
return new_context
def flatten ( self ) :
flat = { }
for d in self . dicts :
flat . update ( d )
return flat
def __eq__ ( self , other ) :
if isinstance ( other , BaseContext ) :
return self . flatten ( ) == other . flatten ( )
return False
class Context ( BaseContext ) :
def __init__ ( self , dict_ = None , autoescape = True , current_app = None , use_l10n = None , use_tz = None ) :
self . autoescape = autoescape
self . current_app = current_app
self . use_l10n = use_l10n
self . use_tz = use_tz
self . render_context = RenderContext ( )
super ( Context , self ) . __init__ ( dict_ )
def __copy__ ( self ) :
duplicate = super ( Context , self ) . __copy__ ( )
duplicate . render_context = copy ( self . render_context )
return duplicate
def update ( self , other_dict ) :
if not hasattr ( other_dict , '__getitem__' ) :
raise TypeError ( 'other_dict must be a mapping (dictionary-like) object.' )
self . dicts . append ( other_dict )
return other_dict
class RenderContext ( BaseContext ) :
def __iter__ ( self ) :
for d in self . dicts [ - 1 ] :
yield d
def has_key ( self , key ) :
return key in self . dicts [ - 1 ]
def get ( self , key , otherwise = None ) :
return self . dicts [ - 1 ] . get ( key , otherwise )
def __getitem__ ( self , key ) :
return self . dicts [ - 1 ] [ key ]
def get_standard_processors ( ) :
from django . conf import settings
global _standard_context_processors
if _standard_context_processors is None :
processors = [ ]
collect = [ ]
collect . extend ( _builtin_context_processors )
collect . extend ( settings . TEMPLATE_CONTEXT_PROCESSORS )
for path in collect :
func = import_string ( path )
processors . append ( func )
_standard_context_processors = tuple ( processors )
return _standard_context_processors
class RequestContext ( Context ) :
def __init__ ( self , request , dict_ = None , processors = None , current_app = None , use_l10n = None , use_tz = None ) :
Context . __init__ ( self , dict_ , current_app = current_app , use_l10n = use_l10n , use_tz = use_tz )
if processors is None :
processors = ( )
else :
processors = tuple ( processors )
updates = dict ( )
for processor in get_standard_processors ( ) + processors :
updates . update ( processor ( request ) )
self . update ( updates )
from django . template . base import Lexer , Parser , tag_re , NodeList , VariableNode , TemplateSyntaxError
from django . utils . encoding import force_text
from django . utils . html import escape
from django . utils . safestring import SafeData , EscapeData
from django . utils . formats import localize
from django . utils . timezone import template_localtime
class DebugLexer ( Lexer ) :
def __init__ ( self , template_string , origin ) :
super ( DebugLexer , self ) . __init__ ( template_string , origin )
def tokenize ( self ) :
result , upto = [ ] , 0
for match in tag_re . finditer ( self . template_string ) :
start , end = match . span ( )
if start > upto :
result . append ( self . create_token ( self . template_string [ upto : start ] , ( upto , start ) , False ) )
upto = start
result . append ( self . create_token ( self . template_string [ start : end ] , ( start , end ) , True ) )
upto = end
last_bit = self . template_string [ upto : ]
if last_bit :
result . append ( self . create_token ( last_bit , ( upto , upto + len ( last_bit ) ) , False ) )
return result
def create_token ( self , token_string , source , in_tag ) :
token = super ( DebugLexer , self ) . create_token ( token_string , in_tag )
token . source = self . origin , source
return token
class DebugParser ( Parser ) :
def __init__ ( self , lexer ) :
super ( DebugParser , self ) . __init__ ( lexer )
self . command_stack = [ ]
def enter_command ( self , command , token ) :
self . command_stack . append ( ( command , token . source ) )
def exit_command ( self ) :
self . command_stack . pop ( )
def error ( self , token , msg ) :
return self . source_error ( token . source , msg )
def source_error ( self , source , msg ) :
e = TemplateSyntaxError ( msg )
e . django_template_source = source
return e
def create_nodelist ( self ) :
return DebugNodeList ( )
def create_variable_node ( self , contents ) :
return DebugVariableNode ( contents )
def extend_nodelist ( self , nodelist , node , token ) :
node . source = token . source
super ( DebugParser , self ) . extend_nodelist ( nodelist , node , token )
def unclosed_block_tag ( self , parse_until ) :
command , source = self . command_stack . pop ( )
msg = "Unclosed tag '%s'. Looking for one of: %s " % ( command , ', ' . join ( parse_until ) )
raise self . source_error ( source , msg )
def compile_filter_error ( self , token , e ) :
if not hasattr ( e , 'django_template_source' ) :
e . django_template_source = token . source
def compile_function_error ( self , token , e ) :
if not hasattr ( e , 'django_template_source' ) :
e . django_template_source = token . source
class DebugNodeList ( NodeList ) :
def render_node ( self , node , context ) :
try :
return node . render ( context )
except Exception as e :
if not hasattr ( e , 'django_template_source' ) :
e . django_template_source = node . source
raise
class DebugVariableNode ( VariableNode ) :
def render ( self , context ) :
try :
output = self . filter_expression . resolve ( context )
output = template_localtime ( output , use_tz = context . use_tz )
output = localize ( output , use_l10n = context . use_l10n )
output = force_text ( output )
except UnicodeDecodeError :
return ''
except Exception as e :
if not hasattr ( e , 'django_template_source' ) :
e . django_template_source = self . source
raise
if ( context . autoescape and not isinstance ( output , SafeData ) ) or isinstance ( output , EscapeData ) :
return escape ( output )
else :
return output
from __future__ import unicode_literals
import re
import random as random_module
from decimal import Decimal , InvalidOperation , Context , ROUND_HALF_UP
from functools import wraps
from pprint import pformat
from django . template . base import Variable , Library , VariableDoesNotExist
from django . conf import settings
from django . utils import formats
from django . utils . dateformat import format , time_format
from django . utils . encoding import force_text , iri_to_uri
from django . utils . html import ( conditional_escape , escapejs , escape , urlize as _urlize , linebreaks , strip_tags , avoid_wrapping , remove_tags )
from django . utils . http import urlquote
from django . utils . text import Truncator , wrap , phone2numeric
from django . utils . safestring import mark_safe , SafeData , mark_for_escaping
from django . utils import six
from django . utils . timesince import timesince , timeuntil
from django . utils . translation import ugettext , ungettext
from django . utils . text import normalize_newlines , slugify as _slugify
register = Library ( )
def stringfilter ( func ) :
def _dec ( * args , ** kwargs ) :
if args :
args = list ( args )
args [ 0 ] = force_text ( args [ 0 ] )
if ( isinstance ( args [ 0 ] , SafeData ) and getattr ( _dec . _decorated_function , 'is_safe' , False ) ) :
return mark_safe ( func ( * args , ** kwargs ) )
return func ( * args , ** kwargs )
_dec . _decorated_function = getattr ( func , '_decorated_function' , func )
return wraps ( func ) ( _dec )
@ register . filter ( is_safe = True )
@ stringfilter
def addslashes ( value ) :
return value . replace ( '\\' , '\\\\' ) . replace ( '"' , '\\"' ) . replace ( "'" , "\\'" )
@ register . filter ( is_safe = True )
@ stringfilter
def capfirst ( value ) :
return value and value [ 0 ] . upper ( ) + value [ 1 : ]
@ register . filter ( "escapejs" )
@ stringfilter
def escapejs_filter ( value ) :
return escapejs ( value )
pos_inf = 1e200 * 1e200
neg_inf = - 1e200 * 1e200
nan = ( 1e200 * 1e200 ) // ( 1e200 * 1e200 )
special_floats = [ str ( pos_inf ) , str ( neg_inf ) , str ( nan ) ]
@ register . filter ( is_safe = True )
def floatformat ( text , arg = - 1 ) :
try :
input_val = force_text ( text )
d = Decimal ( input_val )
except UnicodeEncodeError :
return ''
except InvalidOperation :
if input_val in special_floats :
return input_val
try :
d = Decimal ( force_text ( float ( text ) ) )
except ( ValueError , InvalidOperation , TypeError , UnicodeEncodeError ) :
return ''
try :
p = int ( arg )
except ValueError :
return input_val
try :
m = int ( d ) - d
except ( ValueError , OverflowError , InvalidOperation ) :
return input_val
if not m and p < 0 :
return mark_safe ( formats . number_format ( '%d' % ( int ( d ) ) , 0 ) )
if p == 0 :
exp = Decimal ( 1 )
else :
exp = Decimal ( '1.0' ) / ( Decimal ( 10 ) ** abs ( p ) )
try :
tupl = d . as_tuple ( )
units = len ( tupl [ 1 ] ) - tupl [ 2 ]
prec = abs ( p ) + units + 1
sign , digits , exponent = d . quantize ( exp , ROUND_HALF_UP , Context ( prec = prec ) ) . as_tuple ( )
digits = [ six . text_type ( digit ) for digit in reversed ( digits ) ]
while len ( digits ) <= abs ( exponent ) :
digits . append ( '0' )
digits . insert ( - exponent , '.' )
if sign :
digits . append ( '-' )
number = '' . join ( reversed ( digits ) )
return mark_safe ( formats . number_format ( number , abs ( p ) ) )
except InvalidOperation :
return input_val
@ register . filter ( is_safe = True )
@ stringfilter
def iriencode ( value ) :
return force_text ( iri_to_uri ( value ) )
@ register . filter ( is_safe = True , needs_autoescape = True )
@ stringfilter
def linenumbers ( value , autoescape = None ) :
lines = value . split ( '\n' )
width = six . text_type ( len ( six . text_type ( len ( lines ) ) ) )
if not autoescape or isinstance ( value , SafeData ) :
for i , line in enumerate ( lines ) :
lines [ i ] = ( "%0" + width + "d. %s" ) % ( i + 1 , line )
else :
for i , line in enumerate ( lines ) :
lines [ i ] = ( "%0" + width + "d. %s" ) % ( i + 1 , escape ( line ) )
return mark_safe ( '\n' . join ( lines ) )
@ register . filter ( is_safe = True )
@ stringfilter
def lower ( value ) :
return value . lower ( )
@ register . filter ( is_safe = False )
@ stringfilter
def make_list ( value ) :
return list ( value )
@ register . filter ( is_safe = True )
@ stringfilter
def slugify ( value ) :
return _slugify ( value )
@ register . filter ( is_safe = True )
def stringformat ( value , arg ) :
try :
return ( "%" + six . text_type ( arg ) ) % value
except ( ValueError , TypeError ) :
return ""
@ register . filter ( is_safe = True )
@ stringfilter
def title ( value ) :
t = re . sub ( "([a-z])'([A-Z])" , lambda m : m . group ( 0 ) . lower ( ) , value . title ( ) )
return re . sub ( "\d([A-Z])" , lambda m : m . group ( 0 ) . lower ( ) , t )
@ register . filter ( is_safe = True )
@ stringfilter
def truncatechars ( value , arg ) :
try :
length = int ( arg )
except ValueError :
return value
return Truncator ( value ) . chars ( length )
@ register . filter ( is_safe = True )
@ stringfilter
def truncatechars_html ( value , arg ) :
try :
length = int ( arg )
except ValueError :
return value
return Truncator ( value ) . chars ( length , html = True )
@ register . filter ( is_safe = True )
@ stringfilter
def truncatewords ( value , arg ) :
try :
length = int ( arg )
except ValueError :
return value
return Truncator ( value ) . words ( length , truncate = ' ...' )
@ register . filter ( is_safe = True )
@ stringfilter
def truncatewords_html ( value , arg ) :
try :
length = int ( arg )
except ValueError :
return value
return Truncator ( value ) . words ( length , html = True , truncate = ' ...' )
@ register . filter ( is_safe = False )
@ stringfilter
def upper ( value ) :
return value . upper ( )
@ register . filter ( is_safe = False )
@ stringfilter
def urlencode ( value , safe = None ) :
kwargs = { }
if safe is not None :
kwargs [ 'safe' ] = safe
return urlquote ( value , ** kwargs )
@ register . filter ( is_safe = True , needs_autoescape = True )
@ stringfilter
def urlize ( value , autoescape = None ) :
return mark_safe ( _urlize ( value , nofollow = True , autoescape = autoescape ) )
@ register . filter ( is_safe = True , needs_autoescape = True )
@ stringfilter
def urlizetrunc ( value , limit , autoescape = None ) :
return mark_safe ( _urlize ( value , trim_url_limit = int ( limit ) , nofollow = True , autoescape = autoescape ) )
@ register . filter ( is_safe = False )
@ stringfilter
def wordcount ( value ) :
return len ( value . split ( ) )
@ register . filter ( is_safe = True )
@ stringfilter
def wordwrap ( value , arg ) :
return wrap ( value , int ( arg ) )
@ register . filter ( is_safe = True )
@ stringfilter
def ljust ( value , arg ) :
return value . ljust ( int ( arg ) )
@ register . filter ( is_safe = True )
@ stringfilter
def rjust ( value , arg ) :
return value . rjust ( int ( arg ) )
@ register . filter ( is_safe = True )
@ stringfilter
def center ( value , arg ) :
return value . center ( int ( arg ) )
@ register . filter
@ stringfilter
def cut ( value , arg ) :
safe = isinstance ( value , SafeData )
value = value . replace ( arg , '' )
if safe and arg != ';' :
return mark_safe ( value )
return value
@ register . filter ( "escape" , is_safe = True )
@ stringfilter
def escape_filter ( value ) :
return mark_for_escaping ( value )
@ register . filter ( is_safe = True )
@ stringfilter
def force_escape ( value ) :
return escape ( value )
@ register . filter ( "linebreaks" , is_safe = True , needs_autoescape = True )
@ stringfilter
def linebreaks_filter ( value , autoescape = None ) :
autoescape = autoescape and not isinstance ( value , SafeData )
return mark_safe ( linebreaks ( value , autoescape ) )
@ register . filter ( is_safe = True , needs_autoescape = True )
@ stringfilter
def linebreaksbr ( value , autoescape = None ) :
autoescape = autoescape and not isinstance ( value , SafeData )
value = normalize_newlines ( value )
if autoescape :
value = escape ( value )
return mark_safe ( value . replace ( '\n' , '
' ) )
@ register . filter ( is_safe = True )
@ stringfilter
def safe ( value ) :
return mark_safe ( value )
@ register . filter ( is_safe = True )
def safeseq ( value ) :
return [ mark_safe ( force_text ( obj ) ) for obj in value ]
@ register . filter ( is_safe = True )
@ stringfilter
def removetags ( value , tags ) :
return remove_tags ( value , tags )
@ register . filter ( is_safe = True )
@ stringfilter
def striptags ( value ) :
return strip_tags ( value )
@ register . filter ( is_safe = False )
def dictsort ( value , arg ) :
try :
return sorted ( value , key = Variable ( arg ) . resolve )
except ( TypeError , VariableDoesNotExist ) :
return ''
@ register . filter ( is_safe = False )
def dictsortreversed ( value , arg ) :
try :
return sorted ( value , key = Variable ( arg ) . resolve , reverse = True )
except ( TypeError , VariableDoesNotExist ) :
return ''
@ register . filter ( is_safe = False )
def first ( value ) :
try :
return value [ 0 ]
except IndexError :
return ''
@ register . filter ( is_safe = True , needs_autoescape = True )
def join ( value , arg , autoescape = None ) :
value = map ( force_text , value )
if autoescape :
value = [ conditional_escape ( v ) for v in value ]
try :
data = conditional_escape ( arg ) . join ( value )
except AttributeError :
return value
return mark_safe ( data )
@ register . filter ( is_safe = True )
def last ( value ) :
try :
return value [ - 1 ]
except IndexError :
return ''
@ register . filter ( is_safe = False )
def length ( value ) :
try :
return len ( value )
except ( ValueError , TypeError ) :
return 0
@ register . filter ( is_safe = False )
def length_is ( value , arg ) :
try :
return len ( value ) == int ( arg )
except ( ValueError , TypeError ) :
return ''
@ register . filter ( is_safe = True )
def random ( value ) :
return random_module . choice ( value )
@ register . filter ( "slice" , is_safe = True )
def slice_filter ( value , arg ) :
try :
bits = [ ]
for x in arg . split ( ':' ) :
if len ( x ) == 0 :
bits . append ( None )
else :
bits . append ( int ( x ) )
return value [ slice ( * bits ) ]
except ( ValueError , TypeError ) :
return value
@ register . filter ( is_safe = True , needs_autoescape = True )
def unordered_list ( value , autoescape = None ) :
if autoescape :
escaper = conditional_escape
else :
escaper = lambda x : x
def convert_old_style_list ( list_ ) :
if not isinstance ( list_ , ( tuple , list ) ) or len ( list_ ) != 2 :
return list_ , False
first_item , second_item = list_
if second_item == [ ] :
return [ first_item ] , True
try :
iter ( second_item )
except TypeError :
return list_ , False
old_style_list = True
new_second_item = [ ]
for sublist in second_item :
item , old_style_list = convert_old_style_list ( sublist )
if not old_style_list :
break
new_second_item . extend ( item )
if old_style_list :
second_item = new_second_item
return [ first_item , second_item ] , old_style_list
def _helper ( list_ , tabs = 1 ) :
indent = '\t' * tabs
output = [ ]
list_length = len ( list_ )
i = 0
while i < list_length :
title = list_ [ i ]
sublist = ''
sublist_item = None
if isinstance ( title , ( list , tuple ) ) :
sublist_item = title
title = ''
elif i < list_length - 1 :
next_item = list_ [ i + 1 ]
if next_item and isinstance ( next_item , ( list , tuple ) ) :
sublist_item = next_item
i += 1
if sublist_item :
sublist = _helper ( sublist_item , tabs + 1 )
sublist = '\n%s\n%s' % ( indent , sublist , indent , indent )
output . append ( '%s%s%s' % ( indent , escaper ( force_text ( title ) ) , sublist ) )
i += 1
return '\n' . join ( output )
value , converted = convert_old_style_list ( value )
return mark_safe ( _helper ( value ) )
@ register . filter ( is_safe = False )
def add ( value , arg ) :
try :
return int ( value ) + int ( arg )
except ( ValueError , TypeError ) :
try :
return value + arg
except Exception :
return ''
@ register . filter ( is_safe = False )
def get_digit ( value , arg ) :
try :
arg = int ( arg )
value = int ( value )
except ValueError :
return value
if arg < 1 :
return value
try :
return int ( str ( value ) [ - arg ] )
except IndexError :
return 0
@ register . filter ( expects_localtime = True , is_safe = False )
def date ( value , arg = None ) :
if value in ( None , '' ) :
return ''
if arg is None :
arg = settings . DATE_FORMAT
try :
return formats . date_format ( value , arg )
except AttributeError :
try :
return format ( value , arg )
except AttributeError :
return ''
@ register . filter ( expects_localtime = True , is_safe = False )
def time ( value , arg = None ) :
if value in ( None , '' ) :
return ''
if arg is None :
arg = settings . TIME_FORMAT
try :
return formats . time_format ( value , arg )
except AttributeError :
try :
return time_format ( value , arg )
except AttributeError :
return ''
@ register . filter ( "timesince" , is_safe = False )
def timesince_filter ( value , arg = None ) :
if not value :
return ''
try :
if arg :
return timesince ( value , arg )
return timesince ( value )
except ( ValueError , TypeError ) :
return ''
@ register . filter ( "timeuntil" , is_safe = False )
def timeuntil_filter ( value , arg = None ) :
if not value :
return ''
try :
return timeuntil ( value , arg )
except ( ValueError , TypeError ) :
return ''
@ register . filter ( is_safe = False )
def default ( value , arg ) :
return value or arg
@ register . filter ( is_safe = False )
def default_if_none ( value , arg ) :
if value is None :
return arg
return value
@ register . filter ( is_safe = False )
def divisibleby ( value , arg ) :
return int ( value ) % int ( arg ) == 0
@ register . filter ( is_safe = False )
def yesno ( value , arg = None ) :
if arg is None :
arg = ugettext ( 'yes,no,maybe' )
bits = arg . split ( ',' )
if len ( bits ) < 2 :
return value
try :
yes , no , maybe = bits
except ValueError :
yes , no , maybe = bits [ 0 ] , bits [ 1 ] , bits [ 1 ]
if value is None :
return maybe
if value :
return yes
return no
@ register . filter ( is_safe = True )
def filesizeformat ( bytes ) :
try :
bytes = float ( bytes )
except ( TypeError , ValueError , UnicodeDecodeError ) :
value = ungettext ( "%(size)d byte" , "%(size)d bytes" , 0 ) % { 'size' : 0 }
return avoid_wrapping ( value )
filesize_number_format = lambda value : formats . number_format ( round ( value , 1 ) , 1 )
KB = 1 << 10
MB = 1 << 20
GB = 1 << 30
TB = 1 << 40
PB = 1 << 50
if bytes < KB :
value = ungettext ( "%(size)d byte" , "%(size)d bytes" , bytes ) % { 'size' : bytes }
elif bytes < MB :
value = ugettext ( "%s KB" ) % filesize_number_format ( bytes / KB )
elif bytes < GB :
value = ugettext ( "%s MB" ) % filesize_number_format ( bytes / MB )
elif bytes < TB :
value = ugettext ( "%s GB" ) % filesize_number_format ( bytes / GB )
elif bytes < PB :
value = ugettext ( "%s TB" ) % filesize_number_format ( bytes / TB )
else :
value = ugettext ( "%s PB" ) % filesize_number_format ( bytes / PB )
return avoid_wrapping ( value )
@ register . filter ( is_safe = False )
def pluralize ( value , arg = 's' ) :
if ',' not in arg :
arg = ',' + arg
bits = arg . split ( ',' )
if len ( bits ) > 2 :
return ''
singular_suffix , plural_suffix = bits [ : 2 ]
try :
if float ( value ) != 1 :
return plural_suffix
except ValueError :
pass
except TypeError :
try :
if len ( value ) != 1 :
return plural_suffix
except TypeError :
pass
return singular_suffix
@ register . filter ( "phone2numeric" , is_safe = True )
def phone2numeric_filter ( value ) :
return phone2numeric ( value )
@ register . filter ( is_safe = True )
def pprint ( value ) :
try :
return pformat ( value )
except Exception as e :
return "Error in formatting: %s" % force_text ( e , errors = "replace" )
from __future__ import unicode_literals
import os
import sys
import re
from datetime import datetime
from itertools import groupby , cycle as itertools_cycle
import warnings
from django . conf import settings
from django . template . base import ( Node , NodeList , Template , Context , Library , TemplateSyntaxError , VariableDoesNotExist , InvalidTemplateLibrary , BLOCK_TAG_START , BLOCK_TAG_END , VARIABLE_TAG_START , VARIABLE_TAG_END , SINGLE_BRACE_START , SINGLE_BRACE_END , COMMENT_TAG_START , COMMENT_TAG_END , VARIABLE_ATTRIBUTE_SEPARATOR , get_library , token_kwargs , kwarg_re , render_value_in_context )
from django . template . smartif import IfParser , Literal
from django . template . defaultfilters import date
from django . utils . deprecation import RemovedInDjango20Warning
from django . utils . encoding import force_text , smart_text
from django . utils . safestring import mark_safe
from django . utils . html import format_html
from django . utils import six
from django . utils import timezone
register = Library ( )
class AutoEscapeControlNode ( Node ) :
def __init__ ( self , setting , nodelist ) :
self . setting , self . nodelist = setting , nodelist
def render ( self , context ) :
old_setting = context . autoescape
context . autoescape = self . setting
output = self . nodelist . render ( context )
context . autoescape = old_setting
if self . setting :
return mark_safe ( output )
else :
return output
class CommentNode ( Node ) :
def render ( self , context ) :
return ''
class CsrfTokenNode ( Node ) :
def render ( self , context ) :
csrf_token = context . get ( 'csrf_token' , None )
if csrf_token :
if csrf_token == 'NOTPROVIDED' :
return format_html ( "" )
else :
return format_html ( "" , csrf_token )
else :
if settings . DEBUG :
warnings . warn ( "A {% csrf_token %} was used in a template, but the context did not provide the value. This is usually caused by not using RequestContext." )
return ''
class CycleNode ( Node ) :
def __init__ ( self , cyclevars , variable_name = None , silent = False ) :
self . cyclevars = cyclevars
self . variable_name = variable_name
self . silent = silent
def render ( self , context ) :
if self not in context . render_context :
context . render_context [ self ] = itertools_cycle ( self . cyclevars )
cycle_iter = context . render_context [ self ]
value = next ( cycle_iter ) . resolve ( context )
if self . variable_name :
context [ self . variable_name ] = value
if self . silent :
return ''
return render_value_in_context ( value , context )
class DebugNode ( Node ) :
def render ( self , context ) :
from pprint import pformat
output = [ pformat ( val ) for val in context ]
output . append ( '\n\n' )
output . append ( pformat ( sys . modules ) )
return '' . join ( output )
class FilterNode ( Node ) :
def __init__ ( self , filter_expr , nodelist ) :
self . filter_expr , self . nodelist = filter_expr , nodelist
def render ( self , context ) :
output = self . nodelist . render ( context )
with context . push ( var = output ) :
return self . filter_expr . resolve ( context )
class FirstOfNode ( Node ) :
def __init__ ( self , variables ) :
self . vars = variables
def render ( self , context ) :
for var in self . vars :
value = var . resolve ( context , True )
if value :
return render_value_in_context ( value , context )
return ''
class ForNode ( Node ) :
child_nodelists = ( 'nodelist_loop' , 'nodelist_empty' )
def __init__ ( self , loopvars , sequence , is_reversed , nodelist_loop , nodelist_empty = None ) :
self . loopvars , self . sequence = loopvars , sequence
self . is_reversed = is_reversed
self . nodelist_loop = nodelist_loop
if nodelist_empty is None :
self . nodelist_empty = NodeList ( )
else :
self . nodelist_empty = nodelist_empty
def __repr__ ( self ) :
reversed_text = ' reversed' if self . is_reversed else ''
return "" % ( ', ' . join ( self . loopvars ) , self . sequence , len ( self . nodelist_loop ) , reversed_text )
def __iter__ ( self ) :
for node in self . nodelist_loop :
yield node
for node in self . nodelist_empty :
yield node
def render ( self , context ) :
if 'forloop' in context :
parentloop = context [ 'forloop' ]
else :
parentloop = { }
with context . push ( ) :
try :
values = self . sequence . resolve ( context , True )
except VariableDoesNotExist :
values = [ ]
if values is None :
values = [ ]
if not hasattr ( values , '__len__' ) :
values = list ( values )
len_values = len ( values )
if len_values < 1 :
return self . nodelist_empty . render ( context )
nodelist = [ ]
if self . is_reversed :
values = reversed ( values )
num_loopvars = len ( self . loopvars )
unpack = num_loopvars > 1
loop_dict = context [ 'forloop' ] = { 'parentloop' : parentloop }
for i , item in enumerate ( values ) :
loop_dict [ 'counter0' ] = i
loop_dict [ 'counter' ] = i + 1
loop_dict [ 'revcounter' ] = len_values - i
loop_dict [ 'revcounter0' ] = len_values - i - 1
loop_dict [ 'first' ] = ( i == 0 )
loop_dict [ 'last' ] = ( i == len_values - 1 )
pop_context = False
if unpack :
if not isinstance ( item , ( list , tuple ) ) :
len_item = 1
else :
len_item = len ( item )
if num_loopvars != len_item :
warnings . warn ( "Need {0} values to unpack in for loop; got {1}. " "This will raise an exception in Django 2.0." . format ( num_loopvars , len_item ) , RemovedInDjango20Warning )
try :
unpacked_vars = dict ( zip ( self . loopvars , item ) )
except TypeError :
pass
else :
pop_context = True
context . update ( unpacked_vars )
else :
context [ self . loopvars [ 0 ] ] = item
if settings . TEMPLATE_DEBUG :
for node in self . nodelist_loop :
try :
nodelist . append ( node . render ( context ) )
except Exception as e :
if not hasattr ( e , 'django_template_source' ) :
e . django_template_source = node . source
raise
else :
for node in self . nodelist_loop :
nodelist . append ( node . render ( context ) )
if pop_context :
context . pop ( )
return mark_safe ( '' . join ( force_text ( n ) for n in nodelist ) )
class IfChangedNode ( Node ) :
child_nodelists = ( 'nodelist_true' , 'nodelist_false' )
def __init__ ( self , nodelist_true , nodelist_false , * varlist ) :
self . nodelist_true , self . nodelist_false = nodelist_true , nodelist_false
self . _varlist = varlist
def render ( self , context ) :
state_frame = self . _get_context_stack_frame ( context )
if self not in state_frame :
state_frame [ self ] = None
nodelist_true_output = None
try :
if self . _varlist :
compare_to = [ var . resolve ( context , True ) for var in self . _varlist ]
else :
compare_to = nodelist_true_output = self . nodelist_true . render ( context )
except VariableDoesNotExist :
compare_to = None
if compare_to != state_frame [ self ] :
state_frame [ self ] = compare_to
return nodelist_true_output or self . nodelist_true . render ( context )
elif self . nodelist_false :
return self . nodelist_false . render ( context )
return ''
def _get_context_stack_frame ( self , context ) :
if 'forloop' in context :
return context [ 'forloop' ]
else :
return context . render_context
class IfEqualNode ( Node ) :
child_nodelists = ( 'nodelist_true' , 'nodelist_false' )
def __init__ ( self , var1 , var2 , nodelist_true , nodelist_false , negate ) :
self . var1 , self . var2 = var1 , var2
self . nodelist_true , self . nodelist_false = nodelist_true , nodelist_false
self . negate = negate
def __repr__ ( self ) :
return ""
def render ( self , context ) :
val1 = self . var1 . resolve ( context , True )
val2 = self . var2 . resolve ( context , True )
if ( self . negate and val1 != val2 ) or ( not self . negate and val1 == val2 ) :
return self . nodelist_true . render ( context )
return self . nodelist_false . render ( context )
class IfNode ( Node ) :
def __init__ ( self , conditions_nodelists ) :
self . conditions_nodelists = conditions_nodelists
def __repr__ ( self ) :
return ""
def __iter__ ( self ) :
for _ , nodelist in self . conditions_nodelists :
for node in nodelist :
yield node
@ property
def nodelist ( self ) :
return NodeList ( node for _ , nodelist in self . conditions_nodelists for node in nodelist )
def render ( self , context ) :
for condition , nodelist in self . conditions_nodelists :
if condition is not None :
try :
match = condition . eval ( context )
except VariableDoesNotExist :
match = None
else :
match = True
if match :
return nodelist . render ( context )
return ''
class RegroupNode ( Node ) :
def __init__ ( self , target , expression , var_name ) :
self . target , self . expression = target , expression
self . var_name = var_name
def resolve_expression ( self , obj , context ) :
context [ self . var_name ] = obj
return self . expression . resolve ( context , True )
def render ( self , context ) :
obj_list = self . target . resolve ( context , True )
if obj_list is None :
context [ self . var_name ] = [ ]
return ''
context [ self . var_name ] = [ { 'grouper' : key , 'list' : list ( val ) } for key , val in groupby ( obj_list , lambda obj : self . resolve_expression ( obj , context ) ) ]
return ''
def include_is_allowed ( filepath ) :
filepath = os . path . abspath ( filepath )
for root in settings . ALLOWED_INCLUDE_ROOTS :
if filepath . startswith ( root ) :
return True
return False
class SsiNode ( Node ) :
def __init__ ( self , filepath , parsed ) :
self . filepath = filepath
self . parsed = parsed
def render ( self , context ) :
filepath = self . filepath . resolve ( context )
if not include_is_allowed ( filepath ) :
if settings . DEBUG :
return "[Didn't have permission to include file]"
else :
return ''
try :
with open ( filepath , 'r' ) as fp :
output = fp . read ( )
except IOError :
output = ''
if self . parsed :
try :
t = Template ( output , name = filepath )
return t . render ( context )
except TemplateSyntaxError as e :
if settings . DEBUG :
return "[Included template had syntax error: %s]" % e
else :
return ''
return output
class LoadNode ( Node ) :
def render ( self , context ) :
return ''
class NowNode ( Node ) :
def __init__ ( self , format_string ) :
self . format_string = format_string
def render ( self , context ) :
tzinfo = timezone . get_current_timezone ( ) if settings . USE_TZ else None
return date ( datetime . now ( tz = tzinfo ) , self . format_string )
class SpacelessNode ( Node ) :
def __init__ ( self , nodelist ) :
self . nodelist = nodelist
def render ( self , context ) :
from django . utils . html import strip_spaces_between_tags
return strip_spaces_between_tags ( self . nodelist . render ( context ) . strip ( ) )
class TemplateTagNode ( Node ) :
mapping = { 'openblock' : BLOCK_TAG_START , 'closeblock' : BLOCK_TAG_END , 'openvariable' : VARIABLE_TAG_START , 'closevariable' : VARIABLE_TAG_END , 'openbrace' : SINGLE_BRACE_START , 'closebrace' : SINGLE_BRACE_END , 'opencomment' : COMMENT_TAG_START , 'closecomment' : COMMENT_TAG_END , }
def __init__ ( self , tagtype ) :
self . tagtype = tagtype
def render ( self , context ) :
return self . mapping . get ( self . tagtype , '' )
class URLNode ( Node ) :
def __init__ ( self , view_name , args , kwargs , asvar ) :
self . view_name = view_name
self . args = args
self . kwargs = kwargs
self . asvar = asvar
def render ( self , context ) :
from django . core . urlresolvers import reverse , NoReverseMatch
args = [ arg . resolve ( context ) for arg in self . args ]
kwargs = dict ( ( smart_text ( k , 'ascii' ) , v . resolve ( context ) ) for k , v in self . kwargs . items ( ) )
view_name = self . view_name . resolve ( context )
url = ''
try :
url = reverse ( view_name , args = args , kwargs = kwargs , current_app = context . current_app )
except NoReverseMatch :
exc_info = sys . exc_info ( )
if settings . SETTINGS_MODULE :
project_name = settings . SETTINGS_MODULE . split ( '.' ) [ 0 ]
try :
url = reverse ( project_name + '.' + view_name , args = args , kwargs = kwargs , current_app = context . current_app )
except NoReverseMatch :
if self . asvar is None :
six . reraise ( * exc_info )
else :
if self . asvar is None :
raise
if self . asvar :
context [ self . asvar ] = url
return ''
else :
return url
class VerbatimNode ( Node ) :
def __init__ ( self , content ) :
self . content = content
def render ( self , context ) :
return self . content
class WidthRatioNode ( Node ) :
def __init__ ( self , val_expr , max_expr , max_width , asvar = None ) :
self . val_expr = val_expr
self . max_expr = max_expr
self . max_width = max_width
self . asvar = asvar
def render ( self , context ) :
try :
value = self . val_expr . resolve ( context )
max_value = self . max_expr . resolve ( context )
max_width = int ( self . max_width . resolve ( context ) )
except VariableDoesNotExist :
return ''
except ( ValueError , TypeError ) :
raise TemplateSyntaxError ( "widthratio final argument must be a number" )
try :
value = float ( value )
max_value = float ( max_value )
ratio = ( value / max_value ) * max_width
result = str ( int ( round ( ratio ) ) )
except ZeroDivisionError :
return '0'
except ( ValueError , TypeError , OverflowError ) :
return ''
if self . asvar :
context [ self . asvar ] = result
return ''
else :
return result
class WithNode ( Node ) :
def __init__ ( self , var , name , nodelist , extra_context = None ) :
self . nodelist = nodelist
self . extra_context = extra_context or { }
if name :
self . extra_context [ name ] = var
def __repr__ ( self ) :
return ""
def render ( self , context ) :
values = dict ( ( key , val . resolve ( context ) ) for key , val in six . iteritems ( self . extra_context ) )
with context . push ( ** values ) :
return self . nodelist . render ( context )
@ register . tag
def autoescape ( parser , token ) :
args = token . contents . split ( )
if len ( args ) != 2 :
raise TemplateSyntaxError ( "'autoescape' tag requires exactly one argument." )
arg = args [ 1 ]
if arg not in ( 'on' , 'off' ) :
raise TemplateSyntaxError ( "'autoescape' argument should be 'on' or 'off'" )
nodelist = parser . parse ( ( 'endautoescape' , ) )
parser . delete_first_token ( )
return AutoEscapeControlNode ( ( arg == 'on' ) , nodelist )
@ register . tag
def comment ( parser , token ) :
parser . skip_past ( 'endcomment' )
return CommentNode ( )
@ register . tag
def cycle ( parser , token ) :
args = token . split_contents ( )
if len ( args ) < 2 :
raise TemplateSyntaxError ( "'cycle' tag requires at least two arguments" )
if ',' in args [ 1 ] :
args [ 1 : 2 ] = [ '"%s"' % arg for arg in args [ 1 ] . split ( "," ) ]
if len ( args ) == 2 :
name = args [ 1 ]
if not hasattr ( parser , '_namedCycleNodes' ) :
raise TemplateSyntaxError ( "No named cycles in template. '%s' is not defined" % name )
if name not in parser . _namedCycleNodes :
raise TemplateSyntaxError ( "Named cycle '%s' does not exist" % name )
return parser . _namedCycleNodes [ name ]
as_form = False
if len ( args ) > 4 :
if args [ - 3 ] == "as" :
if args [ - 1 ] != "silent" :
raise TemplateSyntaxError ( "Only 'silent' flag is allowed after cycle's name, not '%s'." % args [ - 1 ] )
as_form = True
silent = True
args = args [ : - 1 ]
elif args [ - 2 ] == "as" :
as_form = True
silent = False
if as_form :
name = args [ - 1 ]
values = [ parser . compile_filter ( arg ) for arg in args [ 1 : - 2 ] ]
node = CycleNode ( values , name , silent = silent )
if not hasattr ( parser , '_namedCycleNodes' ) :
parser . _namedCycleNodes = { }
parser . _namedCycleNodes [ name ] = node
else :
values = [ parser . compile_filter ( arg ) for arg in args [ 1 : ] ]
node = CycleNode ( values )
return node
@ register . tag
def csrf_token ( parser , token ) :
return CsrfTokenNode ( )
@ register . tag
def debug ( parser , token ) :
return DebugNode ( )
@ register . tag ( 'filter' )
def do_filter ( parser , token ) :
_ , rest = token . contents . split ( None , 1 )
filter_expr = parser . compile_filter ( "var|%s" % ( rest ) )
for func , unused in filter_expr . filters :
filter_name = getattr ( func , '_filter_name' , None )
if filter_name in ( 'escape' , 'safe' ) :
raise TemplateSyntaxError ( '"filter %s" is not permitted. Use the "autoescape" tag instead.' % filter_name )
nodelist = parser . parse ( ( 'endfilter' , ) )
parser . delete_first_token ( )
return FilterNode ( filter_expr , nodelist )
@ register . tag
def firstof ( parser , token ) :
bits = token . split_contents ( ) [ 1 : ]
if len ( bits ) < 1 :
raise TemplateSyntaxError ( "'firstof' statement requires at least one argument" )
return FirstOfNode ( [ parser . compile_filter ( bit ) for bit in bits ] )
@ register . tag ( 'for' )
def do_for ( parser , token ) :
bits = token . split_contents ( )
if len ( bits ) < 4 :
raise TemplateSyntaxError ( "'for' statements should have at least four" " words: %s" % token . contents )
is_reversed = bits [ - 1 ] == 'reversed'
in_index = - 3 if is_reversed else - 2
if bits [ in_index ] != 'in' :
raise TemplateSyntaxError ( "'for' statements should use the format" " 'for x in y': %s" % token . contents )
loopvars = re . split ( r' *, *' , ' ' . join ( bits [ 1 : in_index ] ) )
for var in loopvars :
if not var or ' ' in var :
raise TemplateSyntaxError ( "'for' tag received an invalid argument:" " %s" % token . contents )
sequence = parser . compile_filter ( bits [ in_index + 1 ] )
nodelist_loop = parser . parse ( ( 'empty' , 'endfor' , ) )
token = parser . next_token ( )
if token . contents == 'empty' :
nodelist_empty = parser . parse ( ( 'endfor' , ) )
parser . delete_first_token ( )
else :
nodelist_empty = None
return ForNode ( loopvars , sequence , is_reversed , nodelist_loop , nodelist_empty )
def do_ifequal ( parser , token , negate ) :
bits = list ( token . split_contents ( ) )
if len ( bits ) != 3 :
raise TemplateSyntaxError ( "%r takes two arguments" % bits [ 0 ] )
end_tag = 'end' + bits [ 0 ]
nodelist_loop = parser . parse ( ( 'else' , end_tag ) )
token = parser . next_token ( )
if token . contents == 'else' :
nodelist_false = parser . parse ( ( end_tag , ) )
parser . delete_first_token ( )
else :
nodelist_false = NodeList ( )
val1 = parser . compile_filter ( bits [ 1 ] )
val2 = parser . compile_filter ( bits [ 2 ] )
return IfEqualNode ( val1 , val2 , nodelist_true , nodelist_false , negate )
@ register . tag
def ifequal ( parser , token ) :
return do_ifequal ( parser , token , False )
@ register . tag
def ifnotequal ( parser , token ) :
return do_ifequal ( parser , token , True )
class TemplateLiteral ( Literal ) :
def __init__ ( self , value , text ) :
self . value = value
self . text = text
def display ( self ) :
return self . text
def eval ( self , context ) :
return self . value . resolve ( context , ignore_failures = True )
class TemplateIfParser ( IfParser ) :
error_class = TemplateSyntaxError
def __init__ ( self , parser , * args , ** kwargs ) :
self . template_parser = parser
super ( TemplateIfParser , self ) . __init__ ( * args , ** kwargs )
def create_var ( self , value ) :
return TemplateLiteral ( self . template_parser . compile_filter ( value ) , value )
@ register . tag ( 'if' )
def do_if ( parser , token ) :
bits = token . split_contents ( ) [ 1 : ]
condition = TemplateIfParser ( parser , bits ) . parse ( )
nodelist = parser . parse ( ( 'elif' , 'else' , 'endif' ) )
conditions_nodelists = [ ( condition , nodelist ) ]
token = parser . next_token ( )
while token . contents . startswith ( 'elif' ) :
bits = token . split_contents ( ) [ 1 : ]
condition = TemplateIfParser ( parser , bits ) . parse ( )
nodelist = parser . parse ( ( 'elif' , 'else' , 'endif' ) )
conditions_nodelists . append ( ( condition , nodelist ) )
token = parser . next_token ( )
if token . contents == 'else' :
nodelist = parser . parse ( ( 'endif' , ) )
conditions_nodelists . append ( ( None , nodelist ) )
token = parser . next_token ( )
assert token . contents == 'endif'
return IfNode ( conditions_nodelists )
@ register . tag
def ifchanged ( parser , token ) :
bits = token . split_contents ( )
nodelist_true = parser . parse ( ( 'else' , 'endifchanged' ) )
token = parser . next_token ( )
if token . contents == 'else' :
nodelist_false = parser . parse ( ( 'endifchanged' , ) )
parser . delete_first_token ( )
else :
nodelist_false = NodeList ( )
values = [ parser . compile_filter ( bit ) for bit in bits [ 1 : ] ]
return IfChangedNode ( nodelist_true , nodelist_false , * values )
@ register . tag
def ssi ( parser , token ) :
bits = token . split_contents ( )
parsed = False
if len ( bits ) not in ( 2 , 3 ) :
raise TemplateSyntaxError ( "'ssi' tag takes one argument: the path to" " the file to be included" )
if len ( bits ) == 3 :
if bits [ 2 ] == 'parsed' :
parsed = True
else :
raise TemplateSyntaxError ( "Second (optional) argument to %s tag" " must be 'parsed'" % bits [ 0 ] )
filepath = parser . compile_filter ( bits [ 1 ] )
return SsiNode ( filepath , parsed )
@ register . tag
def load ( parser , token ) :
bits = token . contents . split ( )
if len ( bits ) >= 4 and bits [ - 2 ] == "from" :
try :
taglib = bits [ - 1 ]
lib = get_library ( taglib )
except InvalidTemplateLibrary as e :
raise TemplateSyntaxError ( "'%s' is not a valid tag library: %s" % ( taglib , e ) )
else :
temp_lib = Library ( )
for name in bits [ 1 : - 2 ] :
if name in lib . tags :
temp_lib . tags [ name ] = lib . tags [ name ]
if name in lib . filters :
temp_lib . filters [ name ] = lib . filters [ name ]
elif name in lib . filters :
temp_lib . filters [ name ] = lib . filters [ name ]
else :
raise TemplateSyntaxError ( "'%s' is not a valid tag or filter in tag library '%s'" % ( name , taglib ) )
parser . add_library ( temp_lib )
else :
for taglib in bits [ 1 : ] :
try :
lib = get_library ( taglib )
parser . add_library ( lib )
except InvalidTemplateLibrary as e :
raise TemplateSyntaxError ( "'%s' is not a valid tag library: %s" % ( taglib , e ) )
return LoadNode ( )
@ register . tag
def now ( parser , token ) :
bits = token . split_contents ( )
if len ( bits ) != 2 :
raise TemplateSyntaxError ( "'now' statement takes one argument" )
format_string = bits [ 1 ] [ 1 : - 1 ]
return NowNode ( format_string )
@ register . tag
def regroup ( parser , token ) :
bits = token . split_contents ( )
if len ( bits ) != 6 :
raise TemplateSyntaxError ( "'regroup' tag takes five arguments" )
target = parser . compile_filter ( bits [ 1 ] )
if bits [ 2 ] != 'by' :
raise TemplateSyntaxError ( "second argument to 'regroup' tag must be 'by'" )
if bits [ 4 ] != 'as' :
raise TemplateSyntaxError ( "next-to-last argument to 'regroup' tag must" " be 'as'" )
var_name = bits [ 5 ]
expression = parser . compile_filter ( var_name + VARIABLE_ATTRIBUTE_SEPARATOR + bits [ 3 ] )
return RegroupNode ( target , expression , var_name )
@ register . tag
def spaceless ( parser , token ) :
nodelist = parser . parse ( ( 'endspaceless' , ) )
parser . delete_first_token ( )
return SpacelessNode ( nodelist )
@ register . tag
def templatetag ( parser , token ) :
bits = token . contents . split ( )
if len ( bits ) != 2 :
raise TemplateSyntaxError ( "'templatetag' statement takes one argument" )
tag = bits [ 1 ]
if tag not in TemplateTagNode . mapping :
raise TemplateSyntaxError ( "Invalid templatetag argument: '%s'." " Must be one of: %s" % ( tag , list ( TemplateTagNode . mapping ) ) )
return TemplateTagNode ( tag )
@ register . tag
def url ( parser , token ) :
bits = token . split_contents ( )
if len ( bits ) < 2 :
raise TemplateSyntaxError ( "'%s' takes at least one argument" " (path to a view)" % bits [ 0 ] )
viewname = parser . compile_filter ( bits [ 1 ] )
args = [ ]
kwargs = { }
asvar = None
bits = bits [ 2 : ]
if len ( bits ) >= 2 and bits [ - 2 ] == 'as' :
asvar = bits [ - 1 ]
bits = bits [ : - 2 ]
if len ( bits ) :
for bit in bits :
match = kwarg_re . match ( bit )
if not match :
raise TemplateSyntaxError ( "Malformed arguments to url tag" )
name , value = match . groups ( )
if name :
kwargs [ name ] = parser . compile_filter ( value )
else :
args . append ( parser . compile_filter ( value ) )
return URLNode ( viewname , args , kwargs , asvar )
@ register . tag
def verbatim ( parser , token ) :
nodelist = parser . parse ( ( 'endverbatim' , ) )
parser . delete_first_token ( )
return VerbatimNode ( nodelist . render ( Context ( ) ) )
@ register . tag
def widthratio ( parser , token ) :
bits = token . split_contents ( )
if len ( bits ) == 4 :
tag , this_value_expr , max_value_expr , max_width = bits
asvar = None
elif len ( bits ) == 6 :
tag , this_value_expr , max_value_expr , max_width , as_ , asvar = bits
if as_ != 'as' :
raise TemplateSyntaxError ( "Invalid syntax in widthratio tag. Expecting 'as' keyword" )
else :
raise TemplateSyntaxError ( "widthratio takes at least three arguments" )
return WidthRatioNode ( parser . compile_filter ( this_value_expr ) , parser . compile_filter ( max_value_expr ) , parser . compile_filter ( max_width ) , asvar = asvar )
@ register . tag ( 'with' )
def do_with ( parser , token ) :
bits = token . split_contents ( )
remaining_bits = bits [ 1 : ]
extra_context = token_kwargs ( remaining_bits , parser , support_legacy = True )
if not extra_context :
raise TemplateSyntaxError ( "%r expected at least one variable " "assignment" % bits [ 0 ] )
if remaining_bits :
raise TemplateSyntaxError ( "%r received an invalid token: %r" % ( bits [ 0 ] , remaining_bits [ 0 ] ) )
nodelist = parser . parse ( ( 'endwith' , ) )
parser . delete_first_token ( )
return WithNode ( None , None , nodelist , extra_context = extra_context )
from django . core . exceptions import ImproperlyConfigured
from django . template . base import Origin , Template , Context , TemplateDoesNotExist
from django . conf import settings
from django . utils . module_loading import import_string
from django . utils import six
template_source_loaders = None
class BaseLoader ( object ) :
is_usable = False
def __init__ ( self , * args , ** kwargs ) :
pass
def __call__ ( self , template_name , template_dirs = None ) :
return self . load_template ( template_name , template_dirs )
def load_template ( self , template_name , template_dirs = None ) :
source , display_name = self . load_template_source ( template_name , template_dirs )
origin = make_origin ( display_name , self . load_template_source , template_name , template_dirs )
try :
template = get_template_from_string ( source , origin , template_name )
return template , None
except TemplateDoesNotExist :
return source , display_name
def load_template_source ( self , template_name , template_dirs = None ) :
raise NotImplementedError ( 'subclasses of BaseLoader must provide a load_template_source() method' )
def reset ( self ) :
pass
class LoaderOrigin ( Origin ) :
def __init__ ( self , display_name , loader , name , dirs ) :
super ( LoaderOrigin , self ) . __init__ ( display_name )
self . loader , self . loadname , self . dirs = loader , name , dirs
def reload ( self ) :
return self . loader ( self . loadname , self . dirs ) [ 0 ]
def make_origin ( display_name , loader , name , dirs ) :
if settings . TEMPLATE_DEBUG and display_name :
return LoaderOrigin ( display_name , loader , name , dirs )
else :
return None
def find_template_loader ( loader ) :
if isinstance ( loader , ( tuple , list ) ) :
loader , args = loader [ 0 ] , loader [ 1 : ]
else :
args = [ ]
if isinstance ( loader , six . string_types ) :
TemplateLoader = import_string ( loader )
if hasattr ( TemplateLoader , 'load_template_source' ) :
func = TemplateLoader ( * args )
else :
if args :
raise ImproperlyConfigured ( "Error importing template source loader %s - can't pass arguments to function-based loader." % loader )
func = TemplateLoader
if not func . is_usable :
import warnings
warnings . warn ( "Your TEMPLATE_LOADERS setting includes %r, but your Python installation doesn't support that type of template loading. Consider removing that line from TEMPLATE_LOADERS." % loader )
return None
else :
return func
else :
raise ImproperlyConfigured ( 'Loader does not define a "load_template" callable template source loader' )
def find_template ( name , dirs = None ) :
global template_source_loaders
if template_source_loaders is None :
loaders = [ ]
for loader_name in settings . TEMPLATE_LOADERS :
loader = find_template_loader ( loader_name )
if loader is not None :
loaders . append ( loader )
template_source_loaders = tuple ( loaders )
for loader in template_source_loaders :
try :
source , display_name = loader ( name , dirs )
return ( source , make_origin ( display_name , loader , name , dirs ) )
except TemplateDoesNotExist :
pass
raise TemplateDoesNotExist ( name )
def get_template ( template_name , dirs = None ) :
template , origin = find_template ( template_name , dirs )
if not hasattr ( template , 'render' ) :
template = get_template_from_string ( template , origin , template_name )
return template
def get_template_from_string ( source , origin = None , name = None ) :
return Template ( source , origin , name )
def render_to_string ( template_name , dictionary = None , context_instance = None , dirs = None ) :
if isinstance ( template_name , ( list , tuple ) ) :
t = select_template ( template_name , dirs )
else :
t = get_template ( template_name , dirs )
if not context_instance :
return t . render ( Context ( dictionary ) )
if not dictionary :
return t . render ( context_instance )
with context_instance . push ( dictionary ) :
return t . render ( context_instance )
def select_template ( template_name_list , dirs = None ) :
if not template_name_list :
raise TemplateDoesNotExist ( "No template names provided" )
not_found = [ ]
for template_name in template_name_list :
try :
return get_template ( template_name , dirs )
except TemplateDoesNotExist as e :
if e . args [ 0 ] not in not_found :
not_found . append ( e . args [ 0 ] )
continue
raise TemplateDoesNotExist ( ', ' . join ( not_found ) )
from collections import defaultdict
from django . conf import settings
from django . template . base import TemplateSyntaxError , Library , Node , TextNode , token_kwargs , Variable
from django . template . loader import get_template
from django . utils . safestring import mark_safe
from django . utils import six
register = Library ( )
BLOCK_CONTEXT_KEY = 'block_context'
class ExtendsError ( Exception ) :
pass
class BlockContext ( object ) :
def __init__ ( self ) :
self . blocks = defaultdict ( list )
def add_blocks ( self , blocks ) :
for name , block in six . iteritems ( blocks ) :
self . blocks [ name ] . insert ( 0 , block )
def pop ( self , name ) :
try :
return self . blocks [ name ] . pop ( )
except IndexError :
return None
def push ( self , name , block ) :
self . blocks [ name ] . append ( block )
def get_block ( self , name ) :
try :
return self . blocks [ name ] [ - 1 ]
except IndexError :
return None
class BlockNode ( Node ) :
def __init__ ( self , name , nodelist , parent = None ) :
self . name , self . nodelist , self . parent = name , nodelist , parent
def __repr__ ( self ) :
return "" % ( self . name , self . nodelist )
def render ( self , context ) :
block_context = context . render_context . get ( BLOCK_CONTEXT_KEY )
with context . push ( ) :
if block_context is None :
context [ 'block' ] = self
result = self . nodelist . render ( context )
else :
push = block = block_context . pop ( self . name )
if block is None :
block = self
block = type ( self ) ( block . name , block . nodelist )
block . context = context
context [ 'block' ] = block
result = block . nodelist . render ( context )
if push is not None :
block_context . push ( self . name , push )
return result
def super ( self ) :
render_context = self . context . render_context
if ( BLOCK_CONTEXT_KEY in render_context and render_context [ BLOCK_CONTEXT_KEY ] . get_block ( self . name ) is not None ) :
return mark_safe ( self . render ( self . context ) )
return ''
class ExtendsNode ( Node ) :
must_be_first = True
def __init__ ( self , nodelist , parent_name , template_dirs = None ) :
self . nodelist = nodelist
self . parent_name = parent_name
self . template_dirs = template_dirs
self . blocks = dict ( ( n . name , n ) for n in nodelist . get_nodes_by_type ( BlockNode ) )
def __repr__ ( self ) :
return '' % self . parent_name . token
def get_parent ( self , context ) :
parent = self . parent_name . resolve ( context )
if not parent :
error_msg = "Invalid template name in 'extends' tag: %r." % parent
if self . parent_name . filters or isinstance ( self . parent_name . var , Variable ) :
error_msg += " Got this from the '%s' variable." % self . parent_name . token
raise TemplateSyntaxError ( error_msg )
if hasattr ( parent , 'render' ) :
return parent
return get_template ( parent )
def render ( self , context ) :
compiled_parent = self . get_parent ( context )
if BLOCK_CONTEXT_KEY not in context . render_context :
context . render_context [ BLOCK_CONTEXT_KEY ] = BlockContext ( )
block_context = context . render_context [ BLOCK_CONTEXT_KEY ]
block_context . add_blocks ( self . blocks )
for node in compiled_parent . nodelist :
if not isinstance ( node , TextNode ) :
if not isinstance ( node , ExtendsNode ) :
blocks = dict ( ( n . name , n ) for n in compiled_parent . nodelist . get_nodes_by_type ( BlockNode ) )
block_context . add_blocks ( blocks )
break
return compiled_parent . _render ( context )
class IncludeNode ( Node ) :
def __init__ ( self , template , * args , ** kwargs ) :
self . template = template
self . extra_context = kwargs . pop ( 'extra_context' , { } )
self . isolated_context = kwargs . pop ( 'isolated_context' , False )
super ( IncludeNode , self ) . __init__ ( * args , ** kwargs )
def render ( self , context ) :
try :
template = self . template . resolve ( context )
if not callable ( getattr ( template , 'render' , None ) ) :
template = get_template ( template )
values = { name : var . resolve ( context ) for name , var in six . iteritems ( self . extra_context ) }
if self . isolated_context :
return template . render ( context . new ( values ) )
with context . push ( ** values ) :
return template . render ( context )
except Exception :
if settings . TEMPLATE_DEBUG :
raise
return ''
@ register . tag ( 'block' )
def do_block ( parser , token ) :
bits = token . contents . split ( )
if len ( bits ) != 2 :
raise TemplateSyntaxError ( "'%s' tag takes only one argument" % bits [ 0 ] )
block_name = bits [ 1 ]
try :
if block_name in parser . __loaded_blocks :
raise TemplateSyntaxError ( "'%s' tag with name '%s' appears more than once" % ( bits [ 0 ] , block_name ) )
parser . __loaded_blocks . append ( block_name )
except AttributeError :
parser . __loaded_blocks = [ block_name ]
nodelist = parser . parse ( ( 'endblock' , ) )
endblock = parser . next_token ( )
acceptable_endblocks = ( 'endblock' , 'endblock %s' % block_name )
if endblock . contents not in acceptable_endblocks :
parser . invalid_block_tag ( endblock , 'endblock' , acceptable_endblocks )
return BlockNode ( block_name , nodelist )
@ register . tag ( 'extends' )
def do_extends ( parser , token ) :
bits = token . split_contents ( )
if len ( bits ) != 2 :
raise TemplateSyntaxError ( "'%s' takes one argument" % bits [ 0 ] )
parent_name = parser . compile_filter ( bits [ 1 ] )
nodelist = parser . parse ( )
if nodelist . get_nodes_by_type ( ExtendsNode ) :
raise TemplateSyntaxError ( "'%s' cannot appear more than once in the same template" % bits [ 0 ] )
return ExtendsNode ( nodelist , parent_name )
@ register . tag ( 'include' )
def do_include ( parser , token ) :
bits = token . split_contents ( )
if len ( bits ) < 2 :
raise TemplateSyntaxError ( "%r tag takes at least one argument: the name of the template to be included." % bits [ 0 ] )
options = { }
remaining_bits = bits [ 2 : ]
while remaining_bits :
option = remaining_bits . pop ( 0 )
if option in options :
raise TemplateSyntaxError ( 'The %r option was specified more ' 'than once.' % option )
if option == 'with' :
value = token_kwargs ( remaining_bits , parser , support_legacy = False )
if not value :
raise TemplateSyntaxError ( '"with" in %r tag needs at least ' 'one keyword argument.' % bits [ 0 ] )
elif option == 'only' :
value = True
else :
raise TemplateSyntaxError ( 'Unknown argument for %r tag: %r.' % ( bits [ 0 ] , option ) )
options [ option ] = value
isolated_context = options . get ( 'only' , False )
namemap = options . get ( 'with' , { } )
return IncludeNode ( parser . compile_filter ( bits [ 1 ] ) , extra_context = namemap , isolated_context = isolated_context )
import os
import sys
from django . apps import apps
from django . conf import settings
from django . template . base import TemplateDoesNotExist
from django . template . loader import BaseLoader
from django . utils . _os import safe_join
from django . utils import six
def calculate_app_template_dirs ( ) :
if six . PY2 :
fs_encoding = sys . getfilesystemencoding ( ) or sys . getdefaultencoding ( )
app_template_dirs = [ ]
for app_config in apps . get_app_configs ( ) :
if not app_config . path :
continue
template_dir = os . path . join ( app_config . path , 'templates' )
if os . path . isdir ( template_dir ) :
if six . PY2 :
template_dir = template_dir . decode ( fs_encoding )
app_template_dirs . append ( template_dir )
return tuple ( app_template_dirs )
app_template_dirs = calculate_app_template_dirs ( )
class Loader ( BaseLoader ) :
is_usable = True
def get_template_sources ( self , template_name , template_dirs = None ) :
if not template_dirs :
template_dirs = app_template_dirs
for template_dir in template_dirs :
try :
yield safe_join ( template_dir , template_name )
except UnicodeDecodeError :
raise
except ValueError :
pass
def load_template_source ( self , template_name , template_dirs = None ) :
for filepath in self . get_template_sources ( template_name , template_dirs ) :
try :
with open ( filepath , 'rb' ) as fp :
return ( fp . read ( ) . decode ( settings . FILE_CHARSET ) , filepath )
except IOError :
pass
raise TemplateDoesNotExist ( template_name )
import hashlib
from django . template . base import TemplateDoesNotExist
from django . template . loader import BaseLoader , get_template_from_string , find_template_loader , make_origin
from django . utils . encoding import force_bytes
class Loader ( BaseLoader ) :
is_usable = True
def __init__ ( self , loaders ) :
self . template_cache = { }
self . find_template_cache = { }
self . _loaders = loaders
self . _cached_loaders = [ ]
@ property
def loaders ( self ) :
if not self . _cached_loaders :
cached_loaders = [ ]
for loader in self . _loaders :
cached_loaders . append ( find_template_loader ( loader ) )
self . _cached_loaders = cached_loaders
return self . _cached_loaders
def cache_key ( self , template_name , template_dirs ) :
if template_dirs :
return '-' . join ( [ template_name , hashlib . sha1 ( force_bytes ( '|' . join ( template_dirs ) ) ) . hexdigest ( ) ] )
else :
return template_name
def find_template ( self , name , dirs = None ) :
key = self . cache_key ( name , dirs )
try :
result = self . find_template_cache [ key ]
except KeyError :
result = None
for loader in self . loaders :
try :
template , display_name = loader ( name , dirs )
except TemplateDoesNotExist :
pass
else :
result = ( template , make_origin ( display_name , loader , name , dirs ) )
break
self . find_template_cache [ key ] = result
if result :
return result
else :
self . template_cache [ key ] = TemplateDoesNotExist
raise TemplateDoesNotExist ( name )
def load_template ( self , template_name , template_dirs = None ) :
key = self . cache_key ( template_name , template_dirs )
template_tuple = self . template_cache . get ( key )
if template_tuple is TemplateDoesNotExist :
raise TemplateDoesNotExist
elif template_tuple is None :
template , origin = self . find_template ( template_name , template_dirs )
if not hasattr ( template , 'render' ) :
try :
template = get_template_from_string ( template , origin , template_name )
except TemplateDoesNotExist :
self . template_cache [ key ] = ( template , origin )
self . template_cache [ key ] = ( template , None )
return self . template_cache [ key ]
def reset ( self ) :
self . template_cache . clear ( )
self . find_template_cache . clear ( )
from __future__ import unicode_literals
try :
from pkg_resources import resource_string
except ImportError :
resource_string = None
from django . apps import apps
from django . conf import settings
from django . template . base import TemplateDoesNotExist
from django . template . loader import BaseLoader
from django . utils import six
class Loader ( BaseLoader ) :
is_usable = resource_string is not None
def load_template_source ( self , template_name , template_dirs = None ) :
if resource_string is not None :
pkg_name = 'templates/' + template_name
for app_config in apps . get_app_configs ( ) :
try :
resource = resource_string ( app_config . name , pkg_name )
except Exception :
continue
if six . PY2 :
resource = resource . decode ( settings . FILE_CHARSET )
return ( resource , 'egg:%s:%s' % ( app_config . name , pkg_name ) )
raise TemplateDoesNotExist ( template_name )
from django . conf import settings
from django . template . base import TemplateDoesNotExist
from django . template . loader import BaseLoader
from django . utils . _os import safe_join
class Loader ( BaseLoader ) :
is_usable = True
def get_template_sources ( self , template_name , template_dirs = None ) :
if not template_dirs :
template_dirs = settings . TEMPLATE_DIRS
for template_dir in template_dirs :
try :
yield safe_join ( template_dir , template_name )
except UnicodeDecodeError :
raise
except ValueError :
pass
def load_template_source ( self , template_name , template_dirs = None ) :
tried = [ ]
for filepath in self . get_template_sources ( template_name , template_dirs ) :
try :
with open ( filepath , 'rb' ) as fp :
return ( fp . read ( ) . decode ( settings . FILE_CHARSET ) , filepath )
except IOError :
tried . append ( filepath )
if tried :
error_msg = "Tried %s" % tried
else :
error_msg = "Your TEMPLATE_DIRS setting is empty. Change it to point to at least one template directory."
raise TemplateDoesNotExist ( error_msg )
load_template_source . is_usable = True
from django . http import HttpResponse
from django . template import loader , Context , RequestContext
from django . utils import six
class ContentNotRenderedError ( Exception ) :
pass
class SimpleTemplateResponse ( HttpResponse ) :
rendering_attrs = [ 'template_name' , 'context_data' , '_post_render_callbacks' ]
def __init__ ( self , template , context = None , content_type = None , status = None ) :
self . template_name = template
self . context_data = context
self . _post_render_callbacks = [ ]
super ( SimpleTemplateResponse , self ) . __init__ ( '' , content_type , status )
self . _is_rendered = False
def __getstate__ ( self ) :
obj_dict = super ( SimpleTemplateResponse , self ) . __getstate__ ( )
if not self . _is_rendered :
raise ContentNotRenderedError ( 'The response content must be ' 'rendered before it can be pickled.' )
for attr in self . rendering_attrs :
if attr in obj_dict :
del obj_dict [ attr ]
return obj_dict
def resolve_template ( self , template ) :
if isinstance ( template , ( list , tuple ) ) :
return loader . select_template ( template )
elif isinstance ( template , six . string_types ) :
return loader . get_template ( template )
else :
return template
def resolve_context ( self , context ) :
if isinstance ( context , Context ) :
return context
else :
return Context ( context )
@ property
def rendered_content ( self ) :
template = self . resolve_template ( self . template_name )
context = self . resolve_context ( self . context_data )
content = template . render ( context )
return content
def add_post_render_callback ( self , callback ) :
if self . _is_rendered :
callback ( self )
else :
self . _post_render_callbacks . append ( callback )
def render ( self ) :
retval = self
if not self . _is_rendered :
self . content = self . rendered_content
for post_callback in self . _post_render_callbacks :
newretval = post_callback ( retval )
if newretval is not None :
retval = newretval
return retval
@ property
def is_rendered ( self ) :
return self . _is_rendered
def __iter__ ( self ) :
if not self . _is_rendered :
raise ContentNotRenderedError ( 'The response content must be ' 'rendered before it can be iterated over.' )
return super ( SimpleTemplateResponse , self ) . __iter__ ( )
@ property
def content ( self ) :
if not self . _is_rendered :
raise ContentNotRenderedError ( 'The response content must be ' 'rendered before it can be accessed.' )
return super ( SimpleTemplateResponse , self ) . content
@ content . setter
def content ( self , value ) :
HttpResponse . content . fset ( self , value )
self . _is_rendered = True
class TemplateResponse ( SimpleTemplateResponse ) :
rendering_attrs = SimpleTemplateResponse . rendering_attrs + [ '_request' , '_current_app' ]
def __init__ ( self , request , template , context = None , content_type = None , status = None , current_app = None ) :
self . _request = request
self . _current_app = current_app
super ( TemplateResponse , self ) . __init__ ( template , context , content_type , status )
def resolve_context ( self , context ) :
if isinstance ( context , Context ) :
return context
return RequestContext ( self . _request , context , current_app = self . _current_app )
class TokenBase ( object ) :
id = None
value = None
first = second = None
def nud ( self , parser ) :
raise parser . error_class ( "Not expecting '%s' in this position in if tag." % self . id )
def led ( self , left , parser ) :
raise parser . error_class ( "Not expecting '%s' as infix operator in if tag." % self . id )
def display ( self ) :
return self . id
def __repr__ ( self ) :
out = [ str ( x ) for x in [ self . id , self . first , self . second ] if x is not None ]
return "(" + " " . join ( out ) + ")"
def infix ( bp , func ) :
class Operator ( TokenBase ) :
lbp = bp
def led ( self , left , parser ) :
self . first = left
self . second = parser . expression ( bp )
return self
def eval ( self , context ) :
try :
return func ( context , self . first , self . second )
except Exception :
return False
return Operator
def prefix ( bp , func ) :
class Operator ( TokenBase ) :
lbp = bp
def nud ( self , parser ) :
self . first = parser . expression ( bp )
self . second = None
return self
def eval ( self , context ) :
try :
return func ( context , self . first )
except Exception :
return False
return Operator
OPERATORS = { 'or' : infix ( 6 , lambda context , x , y : x . eval ( context ) or y . eval ( context ) ) , 'and' : infix ( 7 , lambda context , x , y : x . eval ( context ) and y . eval ( context ) ) , 'not' : prefix ( 8 , lambda context , x : not x . eval ( context ) ) , 'in' : infix ( 9 , lambda context , x , y : x . eval ( context ) in y . eval ( context ) ) , 'not in' : infix ( 9 , lambda context , x , y : x . eval ( context ) not in y . eval ( context ) ) , '=' : infix ( 10 , lambda context , x , y : x . eval ( context ) == y . eval ( context ) ) , '==' : infix ( 10 , lambda context , x , y : x . eval ( context ) == y . eval ( context ) ) , '!=' : infix ( 10 , lambda context , x , y : x . eval ( context ) != y . eval ( context ) ) , '>' : infix ( 10 , lambda context , x , y : x . eval ( context ) > y . eval ( context ) ) , '>=' : infix ( 10 , lambda context , x , y : x . eval ( context ) >= y . eval ( context ) ) , '<' : infix ( 10 , lambda context , x , y : x . eval ( context ) < y . eval ( context ) ) , '<=' : infix ( 10 , lambda context , x , y : x . eval ( context ) <= y . eval ( context ) ) , }
for key , op in OPERATORS . items ( ) :
op . id = key
class Literal ( TokenBase ) :
id = "literal"
lbp = 0
def __init__ ( self , value ) :
self . value = value
def display ( self ) :
return repr ( self . value )
def nud ( self , parser ) :
return self
def eval ( self , context ) :
return self . value
def __repr__ ( self ) :
return "(%s %r)" % ( self . id , self . value )
class EndToken ( TokenBase ) :
lbp = 0
def nud ( self , parser ) :
raise parser . error_class ( "Unexpected end of expression in if tag." )
EndToken = EndToken ( )
class IfParser ( object ) :
error_class = ValueError
def __init__ ( self , tokens ) :
l = len ( tokens )
mapped_tokens = [ ]
i = 0
while i < l :
token = tokens [ i ]
if token == "not" and i + 1 < l and tokens [ i + 1 ] == "in" :
token = "not in"
i += 1
mapped_tokens . append ( self . translate_token ( token ) )
i += 1
self . tokens = mapped_tokens
self . pos = 0
self . current_token = self . next_token ( )
def translate_token ( self , token ) :
try :
op = OPERATORS [ token ]
except ( KeyError , TypeError ) :
return self . create_var ( token )
else :
return op ( )
def next_token ( self ) :
if self . pos >= len ( self . tokens ) :
return EndToken
else :
retval = self . tokens [ self . pos ]
self . pos += 1
return retval
def parse ( self ) :
retval = self . expression ( )
if self . current_token is not EndToken :
raise self . error_class ( "Unused '%s' at end of if expression." % self . current_token . display ( ) )
return retval
def expression ( self , rbp = 0 ) :
t = self . current_token
self . current_token = self . next_token ( )
left = t . nud ( self )
while rbp < self . current_token . lbp :
t = self . current_token
self . current_token = self . next_token ( )
left = t . led ( left , self )
return left
def create_var ( self , value ) :
return Literal ( value )
from __future__ import unicode_literals
from lib2to3 import fixer_base
from lib2to3 . fixer_util import find_indentation , Name , syms , touch_import
from lib2to3 . pgen2 import token
from lib2to3 . pytree import Leaf , Node
class FixUnicode ( fixer_base . BaseFix ) :
BM_compatible = True
PATTERN = """ classdef< 'class' any+ ':' suite< any* funcdef< 'def' unifunc='__unicode__' parameters< '(' NAME ')' > any+ > any* > > """
def transform ( self , node , results ) :
unifunc = results [ "unifunc" ]
strfunc = Name ( "__str__" , prefix = unifunc . prefix )
unifunc . replace ( strfunc )
klass = node . clone ( )
klass . prefix = '\n' + find_indentation ( node )
decorator = Node ( syms . decorator , [ Leaf ( token . AT , "@" ) , Name ( 'python_2_unicode_compatible' ) ] )
decorated = Node ( syms . decorated , [ decorator , klass ] , prefix = node . prefix )
node . replace ( decorated )
touch_import ( 'django.utils.encoding' , 'python_2_unicode_compatible' , decorated )
import os
import stat
import sys
import tempfile
from os . path import join , normcase , normpath , abspath , isabs , sep , dirname
from django . utils . encoding import force_text
from django . utils import six
try :
WindowsError = WindowsError
except NameError :
class WindowsError ( Exception ) :
pass
if six . PY2 :
fs_encoding = sys . getfilesystemencoding ( ) or sys . getdefaultencoding ( )
if six . PY3 or os . name == 'nt' :
abspathu = abspath
else :
def abspathu ( path ) :
if not isabs ( path ) :
path = join ( os . getcwdu ( ) , path )
return normpath ( path )
def upath ( path ) :
if six . PY2 and not isinstance ( path , six . text_type ) :
return path . decode ( fs_encoding )
return path
def npath ( path ) :
if six . PY2 and not isinstance ( path , bytes ) :
return path . encode ( fs_encoding )
return path
def safe_join ( base , * paths ) :
base = force_text ( base )
paths = [ force_text ( p ) for p in paths ]
final_path = abspathu ( join ( base , * paths ) )
base_path = abspathu ( base )
if ( not normcase ( final_path ) . startswith ( normcase ( base_path + sep ) ) and normcase ( final_path ) != normcase ( base_path ) and dirname ( normcase ( base_path ) ) != normcase ( base_path ) ) :
raise ValueError ( 'The joined path (%s) is located outside of the base ' 'path component (%s)' % ( final_path , base_path ) )
return final_path
def rmtree_errorhandler ( func , path , exc_info ) :
exctype , value = exc_info [ : 2 ]
if exctype is not WindowsError or 'Access is denied' not in str ( value ) :
raise
if ( ( os . stat ( path ) . st_mode & stat . S_IREAD ) != stat . S_IREAD ) :
raise
os . chmod ( path , stat . S_IWRITE )
func ( path )
def symlinks_supported ( ) :
tmpdir = tempfile . mkdtemp ( )
original_path = os . path . join ( tmpdir , 'original' )
symlink_path = os . path . join ( tmpdir , 'symlink' )
os . makedirs ( original_path )
try :
os . symlink ( original_path , symlink_path )
supported = True
except ( OSError , NotImplementedError , AttributeError ) :
supported = False
else :
os . remove ( symlink_path )
finally :
os . rmdir ( original_path )
os . rmdir ( tmpdir )
return supported
import os
import shutil
import tarfile
import zipfile
from django . utils import six
class ArchiveException ( Exception ) :
class UnrecognizedArchiveFormat ( ArchiveException ) :
def extract ( path , to_path = '' ) :
with Archive ( path ) as archive :
archive . extract ( to_path )
class Archive ( object ) :
def __init__ ( self , file ) :
self . _archive = self . _archive_cls ( file ) ( file )
@ staticmethod
def _archive_cls ( file ) :
cls = None
if isinstance ( file , six . string_types ) :
filename = file
else :
try :
filename = file . name
except AttributeError :
raise UnrecognizedArchiveFormat ( "File object not a recognized archive format." )
base , tail_ext = os . path . splitext ( filename . lower ( ) )
cls = extension_map . get ( tail_ext )
if not cls :
base , ext = os . path . splitext ( base )
cls = extension_map . get ( ext )
if not cls :
raise UnrecognizedArchiveFormat ( "Path not a recognized archive format: %s" % filename )
return cls
def __enter__ ( self ) :
return self
def __exit__ ( self , exc_type , exc_value , traceback ) :
self . close ( )
def extract ( self , to_path = '' ) :
self . _archive . extract ( to_path )
def list ( self ) :
self . _archive . list ( )
def close ( self ) :
self . _archive . close ( )
class BaseArchive ( object ) :
def split_leading_dir ( self , path ) :
path = str ( path )
path = path . lstrip ( '/' ) . lstrip ( '\\' )
if '/' in path and ( ( '\\' in path and path . find ( '/' ) < path . find ( '\\' ) ) or '\\' not in path ) :
return path . split ( '/' , 1 )
elif '\\' in path :
return path . split ( '\\' , 1 )
else :
return path , ''
def has_leading_dir ( self , paths ) :
common_prefix = None
for path in paths :
prefix , rest = self . split_leading_dir ( path )
if not prefix :
return False
elif common_prefix is None :
common_prefix = prefix
elif prefix != common_prefix :
return False
return True
def extract ( self ) :
raise NotImplementedError ( 'subclasses of BaseArchive must provide an extract() method' )
def list ( self ) :
raise NotImplementedError ( 'subclasses of BaseArchive must provide a list() method' )
class TarArchive ( BaseArchive ) :
def __init__ ( self , file ) :
self . _archive = tarfile . open ( file )
def list ( self , * args , ** kwargs ) :
self . _archive . list ( * args , ** kwargs )
def extract ( self , to_path ) :
members = [ member for member in self . _archive . getmembers ( ) if member . name != 'pax_global_header' ]
leading = self . has_leading_dir ( x . name for x in members )
for member in members :
name = member . name
if leading :
name = self . split_leading_dir ( name ) [ 1 ]
filename = os . path . join ( to_path , name )
if member . isdir ( ) :
if filename and not os . path . exists ( filename ) :
os . makedirs ( filename )
else :
try :
extracted = self . _archive . extractfile ( member )
except ( KeyError , AttributeError ) as exc :
print ( "In the tar file %s the member %s is invalid: %s" % ( name , member . name , exc ) )
else :
dirname = os . path . dirname ( filename )
if dirname and not os . path . exists ( dirname ) :
os . makedirs ( dirname )
with open ( filename , 'wb' ) as outfile :
shutil . copyfileobj ( extracted , outfile )
finally :
if extracted :
extracted . close ( )
def close ( self ) :
self . _archive . close ( )
class ZipArchive ( BaseArchive ) :
def __init__ ( self , file ) :
self . _archive = zipfile . ZipFile ( file )
def list ( self , * args , ** kwargs ) :
self . _archive . printdir ( * args , ** kwargs )
def extract ( self , to_path ) :
namelist = self . _archive . namelist ( )
leading = self . has_leading_dir ( namelist )
for name in namelist :
data = self . _archive . read ( name )
if leading :
name = self . split_leading_dir ( name ) [ 1 ]
filename = os . path . join ( to_path , name )
dirname = os . path . dirname ( filename )
if dirname and not os . path . exists ( dirname ) :
os . makedirs ( dirname )
if filename . endswith ( ( '/' , '\\' ) ) :
if not os . path . exists ( filename ) :
os . makedirs ( filename )
else :
with open ( filename , 'wb' ) as outfile :
outfile . write ( data )
def close ( self ) :
self . _archive . close ( )
extension_map = { '.tar' : TarArchive , '.tar.bz2' : TarArchive , '.tar.gz' : TarArchive , '.tgz' : TarArchive , '.tz2' : TarArchive , '.zip' : ZipArchive , }
from __future__ import absolute_import
import os
import signal
import sys
import time
import traceback
from django . apps import apps
from django . conf import settings
from django . core . signals import request_finished
try :
from django . utils . six . moves import _thread as thread
except ImportError :
from django . utils . six . moves import _dummy_thread as thread
try :
import threading
except ImportError :
pass
try :
import termios
except ImportError :
termios = None
USE_INOTIFY = False
try :
import pyinotify
fd = pyinotify . INotifyWrapper . create ( ) . inotify_init ( )
if fd >= 0 :
USE_INOTIFY = True
os . close ( fd )
except ImportError :
pass
RUN_RELOADER = True
FILE_MODIFIED = 1
I18N_MODIFIED = 2
_mtimes = { }
_win = ( sys . platform == "win32" )
_error_files = [ ]
_cached_modules = set ( )
_cached_filenames = [ ]
def gen_filenames ( only_new = False ) :
global _cached_modules , _cached_filenames
module_values = set ( sys . modules . values ( ) )
if _cached_modules == module_values :
if only_new :
return [ ]
else :
return _cached_filenames
new_modules = module_values - _cached_modules
new_filenames = [ filename . __file__ for filename in new_modules if hasattr ( filename , '__file__' ) ]
if not _cached_filenames and settings . USE_I18N :
basedirs = [ os . path . join ( os . path . dirname ( os . path . dirname ( __file__ ) ) , 'conf' , 'locale' ) , 'locale' ]
for app_config in reversed ( list ( apps . get_app_configs ( ) ) ) :
basedirs . append ( os . path . join ( app_config . path , 'locale' ) )
basedirs . extend ( settings . LOCALE_PATHS )
basedirs = [ os . path . abspath ( basedir ) for basedir in basedirs if os . path . isdir ( basedir ) ]
for basedir in basedirs :
for dirpath , dirnames , locale_filenames in os . walk ( basedir ) :
for filename in locale_filenames :
if filename . endswith ( '.mo' ) :
new_filenames . append ( os . path . join ( dirpath , filename ) )
if only_new :
filelist = new_filenames
else :
filelist = _cached_filenames + new_filenames + _error_files
filenames = [ ]
for filename in filelist :
if not filename :
continue
if filename . endswith ( ".pyc" ) or filename . endswith ( ".pyo" ) :
filename = filename [ : - 1 ]
if filename . endswith ( "$py.class" ) :
filename = filename [ : - 9 ] + ".py"
if os . path . exists ( filename ) :
filenames . append ( filename )
_cached_modules = _cached_modules . union ( new_modules )
_cached_filenames += new_filenames
return filenames
def reset_translations ( ) :
import gettext
from django . utils . translation import trans_real
gettext . _translations = { }
trans_real . _translations = { }
trans_real . _default = None
trans_real . _active = threading . local ( )
def inotify_code_changed ( ) :
class EventHandler ( pyinotify . ProcessEvent ) :
modified_code = None
def process_default ( self , event ) :
if event . path . endswith ( '.mo' ) :
EventHandler . modified_code = I18N_MODIFIED
else :
EventHandler . modified_code = FILE_MODIFIED
wm = pyinotify . WatchManager ( )
notifier = pyinotify . Notifier ( wm , EventHandler ( ) )
def update_watch ( sender = None , ** kwargs ) :
if sender and getattr ( sender , 'handles_files' , False ) :
return
mask = ( pyinotify . IN_MODIFY | pyinotify . IN_DELETE | pyinotify . IN_ATTRIB | pyinotify . IN_MOVED_FROM | pyinotify . IN_MOVED_TO | pyinotify . IN_CREATE )
for path in gen_filenames ( only_new = True ) :
wm . add_watch ( path , mask )
request_finished . connect ( update_watch )
update_watch ( )
notifier . check_events ( timeout = None )
notifier . read_events ( )
notifier . process_events ( )
notifier . stop ( )
return EventHandler . modified_code
def code_changed ( ) :
global _mtimes , _win
for filename in gen_filenames ( ) :
stat = os . stat ( filename )
mtime = stat . st_mtime
if _win :
mtime -= stat . st_ctime
if filename not in _mtimes :
_mtimes [ filename ] = mtime
continue
if mtime != _mtimes [ filename ] :
_mtimes = { }
try :
del _error_files [ _error_files . index ( filename ) ]
except ValueError :
pass
return I18N_MODIFIED if filename . endswith ( '.mo' ) else FILE_MODIFIED
return False
def check_errors ( fn ) :
def wrapper ( * args , ** kwargs ) :
try :
fn ( * args , ** kwargs )
except ( ImportError , IndentationError , NameError , SyntaxError , TypeError , AttributeError ) :
et , ev , tb = sys . exc_info ( )
if getattr ( ev , 'filename' , None ) is None :
filename = traceback . extract_tb ( tb ) [ - 1 ] [ 0 ]
else :
filename = ev . filename
if filename not in _error_files :
_error_files . append ( filename )
raise
return wrapper
def ensure_echo_on ( ) :
if termios :
fd = sys . stdin
if fd . isatty ( ) :
attr_list = termios . tcgetattr ( fd )
if not attr_list [ 3 ] & termios . ECHO :
attr_list [ 3 ] |= termios . ECHO
if hasattr ( signal , 'SIGTTOU' ) :
old_handler = signal . signal ( signal . SIGTTOU , signal . SIG_IGN )
else :
old_handler = None
termios . tcsetattr ( fd , termios . TCSANOW , attr_list )
if old_handler is not None :
signal . signal ( signal . SIGTTOU , old_handler )
def reloader_thread ( ) :
ensure_echo_on ( )
if USE_INOTIFY :
fn = inotify_code_changed
else :
fn = code_changed
while RUN_RELOADER :
change = fn ( )
if change == FILE_MODIFIED :
sys . exit ( 3 )
elif change == I18N_MODIFIED :
reset_translations ( )
time . sleep ( 1 )
def restart_with_reloader ( ) :
while True :
args = [ sys . executable ] + [ '-W%s' % o for o in sys . warnoptions ] + sys . argv
if sys . platform == "win32" :
args = [ '"%s"' % arg for arg in args ]
new_environ = os . environ . copy ( )
new_environ [ "RUN_MAIN" ] = 'true'
exit_code = os . spawnve ( os . P_WAIT , sys . executable , args , new_environ )
if exit_code != 3 :
return exit_code
def python_reloader ( main_func , args , kwargs ) :
if os . environ . get ( "RUN_MAIN" ) == "true" :
thread . start_new_thread ( main_func , args , kwargs )
try :
reloader_thread ( )
except KeyboardInterrupt :
pass
else :
try :
exit_code = restart_with_reloader ( )
if exit_code < 0 :
os . kill ( os . getpid ( ) , - exit_code )
else :
sys . exit ( exit_code )
except KeyboardInterrupt :
pass
def jython_reloader ( main_func , args , kwargs ) :
from _systemrestart import SystemRestart
thread . start_new_thread ( main_func , args )
while True :
if code_changed ( ) :
raise SystemRestart
time . sleep ( 1 )
def main ( main_func , args = None , kwargs = None ) :
if args is None :
args = ( )
if kwargs is None :
kwargs = { }
if sys . platform . startswith ( 'java' ) :
reloader = jython_reloader
else :
reloader = python_reloader
wrapped_main_func = check_errors ( main_func )
reloader ( wrapped_main_func , args , kwargs )
BASE2_ALPHABET = '01'
BASE16_ALPHABET = '0123456789ABCDEF'
BASE56_ALPHABET = '23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnpqrstuvwxyz'
BASE36_ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz'
BASE62_ALPHABET = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
BASE64_ALPHABET = BASE62_ALPHABET + '-_'
class BaseConverter ( object ) :
decimal_digits = '0123456789'
def __init__ ( self , digits , sign = '-' ) :
self . sign = sign
self . digits = digits
if sign in self . digits :
raise ValueError ( 'Sign character found in converter base digits.' )
def __repr__ ( self ) :
return "" % ( len ( self . digits ) , self . digits )
def encode ( self , i ) :
neg , value = self . convert ( i , self . decimal_digits , self . digits , '-' )
if neg :
return self . sign + value
return value
def decode ( self , s ) :
neg , value = self . convert ( s , self . digits , self . decimal_digits , self . sign )
if neg :
value = '-' + value
return int ( value )
def convert ( self , number , from_digits , to_digits , sign ) :
if str ( number ) [ 0 ] == sign :
number = str ( number ) [ 1 : ]
neg = 1
else :
neg = 0
x = 0
for digit in str ( number ) :
x = x * len ( from_digits ) + from_digits . index ( digit )
if x == 0 :
res = to_digits [ 0 ]
else :
res = ''
while x > 0 :
digit = x % len ( to_digits )
res = to_digits [ digit ] + res
x = int ( x // len ( to_digits ) )
return neg , res
base2 = BaseConverter ( BASE2_ALPHABET )
base16 = BaseConverter ( BASE16_ALPHABET )
base36 = BaseConverter ( BASE36_ALPHABET )
base56 = BaseConverter ( BASE56_ALPHABET )
base62 = BaseConverter ( BASE62_ALPHABET )
base64 = BaseConverter ( BASE64_ALPHABET , sign = '$' )
from __future__ import unicode_literals
import hashlib
import re
import time
from django . conf import settings
from django . core . cache import caches
from django . utils . encoding import iri_to_uri , force_bytes , force_text
from django . utils . http import http_date
from django . utils . timezone import get_current_timezone_name
from django . utils . translation import get_language
cc_delim_re = re . compile ( r'\s*,\s*' )
def patch_cache_control ( response , ** kwargs ) :
def dictitem ( s ) :
t = s . split ( '=' , 1 )
if len ( t ) > 1 :
return ( t [ 0 ] . lower ( ) , t [ 1 ] )
else :
return ( t [ 0 ] . lower ( ) , True )
def dictvalue ( t ) :
if t [ 1 ] is True :
return t [ 0 ]
else :
return '%s=%s' % ( t [ 0 ] , t [ 1 ] )
if response . has_header ( 'Cache-Control' ) :
cc = cc_delim_re . split ( response [ 'Cache-Control' ] )
cc = dict ( dictitem ( el ) for el in cc )
else :
cc = { }
if 'max-age' in cc and 'max_age' in kwargs :
kwargs [ 'max_age' ] = min ( int ( cc [ 'max-age' ] ) , kwargs [ 'max_age' ] )
if 'private' in cc and 'public' in kwargs :
del cc [ 'private' ]
elif 'public' in cc and 'private' in kwargs :
del cc [ 'public' ]
for ( k , v ) in kwargs . items ( ) :
cc [ k . replace ( '_' , '-' ) ] = v
cc = ', ' . join ( dictvalue ( el ) for el in cc . items ( ) )
response [ 'Cache-Control' ] = cc
def get_max_age ( response ) :
if not response . has_header ( 'Cache-Control' ) :
return
cc = dict ( _to_tuple ( el ) for el in cc_delim_re . split ( response [ 'Cache-Control' ] ) )
if 'max-age' in cc :
try :
return int ( cc [ 'max-age' ] )
except ( ValueError , TypeError ) :
pass
def _set_response_etag ( response ) :
if not response . streaming :
response [ 'ETag' ] = '"%s"' % hashlib . md5 ( response . content ) . hexdigest ( )
return response
def patch_response_headers ( response , cache_timeout = None ) :
if cache_timeout is None :
cache_timeout = settings . CACHE_MIDDLEWARE_SECONDS
if cache_timeout < 0 :
cache_timeout = 0
if settings . USE_ETAGS and not response . has_header ( 'ETag' ) :
if hasattr ( response , 'render' ) and callable ( response . render ) :
response . add_post_render_callback ( _set_response_etag )
else :
response = _set_response_etag ( response )
if not response . has_header ( 'Last-Modified' ) :
response [ 'Last-Modified' ] = http_date ( )
if not response . has_header ( 'Expires' ) :
response [ 'Expires' ] = http_date ( time . time ( ) + cache_timeout )
patch_cache_control ( response , max_age = cache_timeout )
def add_never_cache_headers ( response ) :
patch_response_headers ( response , cache_timeout = - 1 )
def patch_vary_headers ( response , newheaders ) :
""" Adds (or updates) the "Vary" header in the given HttpResponse object. newheaders is a list of header names that should be in "Vary". Existing headers in "Vary" aren't removed. """
if response . has_header ( 'Vary' ) :
vary_headers = cc_delim_re . split ( response [ 'Vary' ] )
else :
vary_headers = [ ]
existing_headers = set ( header . lower ( ) for header in vary_headers )
additional_headers = [ newheader for newheader in newheaders if newheader . lower ( ) not in existing_headers ]
response [ 'Vary' ] = ', ' . join ( vary_headers + additional_headers )
def has_vary_header ( response , header_query ) :
if not response . has_header ( 'Vary' ) :
return False
vary_headers = cc_delim_re . split ( response [ 'Vary' ] )
existing_headers = set ( header . lower ( ) for header in vary_headers )
return header_query . lower ( ) in existing_headers
def _i18n_cache_key_suffix ( request , cache_key ) :
if settings . USE_I18N or settings . USE_L10N :
cache_key += '.%s' % getattr ( request , 'LANGUAGE_CODE' , get_language ( ) )
if settings . USE_TZ :
tz_name = force_text ( get_current_timezone_name ( ) , errors = 'ignore' )
cache_key += '.%s' % tz_name . encode ( 'ascii' , 'ignore' ) . decode ( 'ascii' ) . replace ( ' ' , '_' )
return cache_key
def _generate_cache_key ( request , method , headerlist , key_prefix ) :
ctx = hashlib . md5 ( )
for header in headerlist :
value = request . META . get ( header , None )
if value is not None :
ctx . update ( force_bytes ( value ) )
url = hashlib . md5 ( force_bytes ( iri_to_uri ( request . build_absolute_uri ( ) ) ) )
cache_key = 'views.decorators.cache.cache_page.%s.%s.%s.%s' % ( key_prefix , method , url . hexdigest ( ) , ctx . hexdigest ( ) )
return _i18n_cache_key_suffix ( request , cache_key )
def _generate_cache_header_key ( key_prefix , request ) :
url = hashlib . md5 ( force_bytes ( iri_to_uri ( request . build_absolute_uri ( ) ) ) )
cache_key = 'views.decorators.cache.cache_header.%s.%s' % ( key_prefix , url . hexdigest ( ) )
return _i18n_cache_key_suffix ( request , cache_key )
def get_cache_key ( request , key_prefix = None , method = 'GET' , cache = None ) :
if key_prefix is None :
key_prefix = settings . CACHE_MIDDLEWARE_KEY_PREFIX
cache_key = _generate_cache_header_key ( key_prefix , request )
if cache is None :
cache = caches [ settings . CACHE_MIDDLEWARE_ALIAS ]
headerlist = cache . get ( cache_key , None )
if headerlist is not None :
return _generate_cache_key ( request , method , headerlist , key_prefix )
else :
return None
def learn_cache_key ( request , response , cache_timeout = None , key_prefix = None , cache = None ) :
if key_prefix is None :
key_prefix = settings . CACHE_MIDDLEWARE_KEY_PREFIX
if cache_timeout is None :
cache_timeout = settings . CACHE_MIDDLEWARE_SECONDS
cache_key = _generate_cache_header_key ( key_prefix , request )
if cache is None :
cache = caches [ settings . CACHE_MIDDLEWARE_ALIAS ]
if response . has_header ( 'Vary' ) :
is_accept_language_redundant = settings . USE_I18N or settings . USE_L10N
headerlist = [ ]
for header in cc_delim_re . split ( response [ 'Vary' ] ) :
header = header . upper ( ) . replace ( '-' , '_' )
if header == 'ACCEPT_LANGUAGE' and is_accept_language_redundant :
continue
headerlist . append ( 'HTTP_' + header )
headerlist . sort ( )
cache . set ( cache_key , headerlist , cache_timeout )
return _generate_cache_key ( request , request . method , headerlist , key_prefix )
else :
cache . set ( cache_key , [ ] , cache_timeout )
return _generate_cache_key ( request , request . method , [ ] , key_prefix )
def _to_tuple ( s ) :
t = s . split ( '=' , 1 )
if len ( t ) == 2 :
return t [ 0 ] . lower ( ) , t [ 1 ]
return t [ 0 ] . lower ( ) , True
__all__ = [ 'luhn' ]
from django . utils import six
LUHN_ODD_LOOKUP = ( 0 , 2 , 4 , 6 , 8 , 1 , 3 , 5 , 7 , 9 )
def luhn ( candidate ) :
if not isinstance ( candidate , six . string_types ) :
candidate = str ( candidate )
try :
evens = sum ( int ( c ) for c in candidate [ - 1 : : - 2 ] )
odds = sum ( LUHN_ODD_LOOKUP [ int ( c ) ] for c in candidate [ - 2 : : - 2 ] )
return ( ( evens + odds ) % 10 == 0 )
except ValueError :
return False
from __future__ import unicode_literals
import hmac
import struct
import hashlib
import binascii
import time
import random
try :
random = random . SystemRandom ( )
using_sysrandom = True
except NotImplementedError :
import warnings
warnings . warn ( 'A secure pseudo-random number generator is not available ' 'on your system. Falling back to Mersenne Twister.' )
using_sysrandom = False
from django . conf import settings
from django . utils . encoding import force_bytes
from django . utils import six
from django . utils . six . moves import xrange
def salted_hmac ( key_salt , value , secret = None ) :
if secret is None :
secret = settings . SECRET_KEY
key_salt = force_bytes ( key_salt )
secret = force_bytes ( secret )
key = hashlib . sha1 ( key_salt + secret ) . digest ( )
return hmac . new ( key , msg = force_bytes ( value ) , digestmod = hashlib . sha1 )
def get_random_string ( length = 12 , allowed_chars = 'abcdefghijklmnopqrstuvwxyz' 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789' ) :
if not using_sysrandom :
random . seed ( hashlib . sha256 ( ( "%s%s%s" % ( random . getstate ( ) , time . time ( ) , settings . SECRET_KEY ) ) . encode ( 'utf-8' ) ) . digest ( ) )
return '' . join ( random . choice ( allowed_chars ) for i in range ( length ) )
if hasattr ( hmac , "compare_digest" ) :
def constant_time_compare ( val1 , val2 ) :
return hmac . compare_digest ( force_bytes ( val1 ) , force_bytes ( val2 ) )
else :
def constant_time_compare ( val1 , val2 ) :
if len ( val1 ) != len ( val2 ) :
return False
result = 0
if six . PY3 and isinstance ( val1 , bytes ) and isinstance ( val2 , bytes ) :
for x , y in zip ( val1 , val2 ) :
result |= x ^ y
else :
for x , y in zip ( val1 , val2 ) :
result |= ord ( x ) ^ ord ( y )
return result == 0
def _bin_to_long ( x ) :
return int ( binascii . hexlify ( x ) , 16 )
def _long_to_bin ( x , hex_format_string ) :
return binascii . unhexlify ( ( hex_format_string % x ) . encode ( 'ascii' ) )
if hasattr ( hashlib , "pbkdf2_hmac" ) :
def pbkdf2 ( password , salt , iterations , dklen = 0 , digest = None ) :
if digest is None :
digest = hashlib . sha256
if not dklen :
dklen = None
password = force_bytes ( password )
salt = force_bytes ( salt )
return hashlib . pbkdf2_hmac ( digest ( ) . name , password , salt , iterations , dklen )
else :
def pbkdf2 ( password , salt , iterations , dklen = 0 , digest = None ) :
assert iterations > 0
if not digest :
digest = hashlib . sha256
password = force_bytes ( password )
salt = force_bytes ( salt )
hlen = digest ( ) . digest_size
if not dklen :
dklen = hlen
if dklen > ( 2 ** 32 - 1 ) * hlen :
raise OverflowError ( 'dklen too big' )
l = - ( - dklen // hlen )
r = dklen - ( l - 1 ) * hlen
hex_format_string = "%%0%ix" % ( hlen * 2 )
inner , outer = digest ( ) , digest ( )
if len ( password ) > inner . block_size :
password = digest ( password ) . digest ( )
password += b'\x00' * ( inner . block_size - len ( password ) )
inner . update ( password . translate ( hmac . trans_36 ) )
outer . update ( password . translate ( hmac . trans_5C ) )
def F ( i ) :
u = salt + struct . pack ( b'>I' , i )
result = 0
for j in xrange ( int ( iterations ) ) :
dig1 , dig2 = inner . copy ( ) , outer . copy ( )
dig1 . update ( u )
dig2 . update ( dig1 . digest ( ) )
u = dig2 . digest ( )
result ^= _bin_to_long ( u )
return _long_to_bin ( result , hex_format_string )
T = [ F ( x ) for x in range ( 1 , l ) ]
return b'' . join ( T ) + F ( l ) [ : r ]
import os
import sys
from . import six
buffering = int ( six . PY3 )
if os . name == 'posix' :
def become_daemon ( our_home_dir = '.' , out_log = '/dev/null' , err_log = '/dev/null' , umask = 0o022 ) :
try :
if os . fork ( ) > 0 :
sys . exit ( 0 )
except OSError as e :
sys . stderr . write ( "fork #1 failed: (%d) %s\n" % ( e . errno , e . strerror ) )
sys . exit ( 1 )
os . setsid ( )
os . chdir ( our_home_dir )
os . umask ( umask )
try :
if os . fork ( ) > 0 :
os . _exit ( 0 )
except OSError as e :
sys . stderr . write ( "fork #2 failed: (%d) %s\n" % ( e . errno , e . strerror ) )
os . _exit ( 1 )
si = open ( '/dev/null' , 'r' )
so = open ( out_log , 'a+' , buffering )
se = open ( err_log , 'a+' , buffering )
os . dup2 ( si . fileno ( ) , sys . stdin . fileno ( ) )
os . dup2 ( so . fileno ( ) , sys . stdout . fileno ( ) )
os . dup2 ( se . fileno ( ) , sys . stderr . fileno ( ) )
sys . stdout , sys . stderr = so , se
else :
def become_daemon ( our_home_dir = '.' , out_log = None , err_log = None , umask = 0o022 ) :
os . chdir ( our_home_dir )
os . umask ( umask )
sys . stdin . close ( )
sys . stdout . close ( )
sys . stderr . close ( )
if err_log :
sys . stderr = open ( err_log , 'a' , buffering )
else :
sys . stderr = NullDevice ( )
if out_log :
sys . stdout = open ( out_log , 'a' , buffering )
else :
sys . stdout = NullDevice ( )
class NullDevice :
def write ( self , s ) :
pass
import copy
import warnings
from collections import OrderedDict
from django . utils import six
from django . utils . deprecation import RemovedInDjango19Warning
class MergeDict ( object ) :
def __init__ ( self , * dicts ) :
warnings . warn ( '`MergeDict` is deprecated, use `dict.update()` ' 'instead.' , RemovedInDjango19Warning , 2 )
self . dicts = dicts
def __bool__ ( self ) :
return any ( self . dicts )
def __nonzero__ ( self ) :
return type ( self ) . __bool__ ( self )
def __getitem__ ( self , key ) :
for dict_ in self . dicts :
try :
return dict_ [ key ]
except KeyError :
pass
raise KeyError ( key )
def __copy__ ( self ) :
return self . __class__ ( * self . dicts )
def get ( self , key , default = None ) :
try :
return self [ key ]
except KeyError :
return default
def getlist ( self , key ) :
for dict_ in self . dicts :
if key in dict_ :
return dict_ . getlist ( key )
return [ ]
def _iteritems ( self ) :
seen = set ( )
for dict_ in self . dicts :
for item in six . iteritems ( dict_ ) :
k = item [ 0 ]
if k in seen :
continue
seen . add ( k )
yield item
def _iterkeys ( self ) :
for k , v in self . _iteritems ( ) :
yield k
def _itervalues ( self ) :
for k , v in self . _iteritems ( ) :
yield v
if six . PY3 :
items = _iteritems
keys = _iterkeys
values = _itervalues
else :
iteritems = _iteritems
iterkeys = _iterkeys
itervalues = _itervalues
def items ( self ) :
return list ( self . iteritems ( ) )
def keys ( self ) :
return list ( self . iterkeys ( ) )
def values ( self ) :
return list ( self . itervalues ( ) )
def has_key ( self , key ) :
for dict_ in self . dicts :
if key in dict_ :
return True
return False
__contains__ = has_key
__iter__ = _iterkeys
def copy ( self ) :
return self . __copy__ ( )
def __str__ ( self ) :
return str ( dict ( self . items ( ) ) )
def __repr__ ( self ) :
dictreprs = ', ' . join ( repr ( d ) for d in self . dicts )
return '%s(%s)' % ( self . __class__ . __name__ , dictreprs )
class SortedDict ( dict ) :
def __new__ ( cls , * args , ** kwargs ) :
instance = super ( SortedDict , cls ) . __new__ ( cls , * args , ** kwargs )
instance . keyOrder = [ ]
return instance
def __init__ ( self , data = None ) :
warnings . warn ( "SortedDict is deprecated and will be removed in Django 1.9." , RemovedInDjango19Warning , stacklevel = 2 )
if data is None or isinstance ( data , dict ) :
data = data or [ ]
super ( SortedDict , self ) . __init__ ( data )
self . keyOrder = list ( data ) if data else [ ]
else :
super ( SortedDict , self ) . __init__ ( )
super_set = super ( SortedDict , self ) . __setitem__
for key , value in data :
if key not in self :
self . keyOrder . append ( key )
super_set ( key , value )
def __deepcopy__ ( self , memo ) :
return self . __class__ ( [ ( key , copy . deepcopy ( value , memo ) ) for key , value in self . items ( ) ] )
def __copy__ ( self ) :
return self . copy ( )
def __setitem__ ( self , key , value ) :
if key not in self :
self . keyOrder . append ( key )
super ( SortedDict , self ) . __setitem__ ( key , value )
def __delitem__ ( self , key ) :
super ( SortedDict , self ) . __delitem__ ( key )
self . keyOrder . remove ( key )
def __iter__ ( self ) :
return iter ( self . keyOrder )
def __reversed__ ( self ) :
return reversed ( self . keyOrder )
def pop ( self , k , * args ) :
result = super ( SortedDict , self ) . pop ( k , * args )
try :
self . keyOrder . remove ( k )
except ValueError :
pass
return result
def popitem ( self ) :
result = super ( SortedDict , self ) . popitem ( )
self . keyOrder . remove ( result [ 0 ] )
return result
def _iteritems ( self ) :
for key in self . keyOrder :
yield key , self [ key ]
def _iterkeys ( self ) :
for key in self . keyOrder :
yield key
def _itervalues ( self ) :
for key in self . keyOrder :
yield self [ key ]
if six . PY3 :
items = _iteritems
keys = _iterkeys
values = _itervalues
else :
iteritems = _iteritems
iterkeys = _iterkeys
itervalues = _itervalues
def items ( self ) :
return [ ( k , self [ k ] ) for k in self . keyOrder ]
def keys ( self ) :
return self . keyOrder [ : ]
def values ( self ) :
return [ self [ k ] for k in self . keyOrder ]
def update ( self , dict_ ) :
for k , v in six . iteritems ( dict_ ) :
self [ k ] = v
def setdefault ( self , key , default ) :
if key not in self :
self . keyOrder . append ( key )
return super ( SortedDict , self ) . setdefault ( key , default )
def copy ( self ) :
return self . __class__ ( self )
def __repr__ ( self ) :
return '{%s}' % ', ' . join ( '%r: %r' % ( k , v ) for k , v in six . iteritems ( self ) )
def clear ( self ) :
super ( SortedDict , self ) . clear ( )
self . keyOrder = [ ]
class OrderedSet ( object ) :
def __init__ ( self , iterable = None ) :
self . dict = OrderedDict ( ( ( x , None ) for x in iterable ) if iterable else [ ] )
def add ( self , item ) :
self . dict [ item ] = None
def remove ( self , item ) :
del self . dict [ item ]
def discard ( self , item ) :
try :
self . remove ( item )
except KeyError :
pass
def __iter__ ( self ) :
return iter ( self . dict . keys ( ) )
def __contains__ ( self , item ) :
return item in self . dict
def __nonzero__ ( self ) :
return bool ( self . dict )
class MultiValueDictKeyError ( KeyError ) :
pass
class MultiValueDict ( dict ) :
def __init__ ( self , key_to_list_mapping = ( ) ) :
super ( MultiValueDict , self ) . __init__ ( key_to_list_mapping )
def __repr__ ( self ) :
return "<%s: %s>" % ( self . __class__ . __name__ , super ( MultiValueDict , self ) . __repr__ ( ) )
def __getitem__ ( self , key ) :
try :
list_ = super ( MultiValueDict , self ) . __getitem__ ( key )
except KeyError :
raise MultiValueDictKeyError ( repr ( key ) )
try :
return list_ [ - 1 ]
except IndexError :
return [ ]
def __setitem__ ( self , key , value ) :
super ( MultiValueDict , self ) . __setitem__ ( key , [ value ] )
def __copy__ ( self ) :
return self . __class__ ( [ ( k , v [ : ] ) for k , v in self . lists ( ) ] )
def __deepcopy__ ( self , memo = None ) :
if memo is None :
memo = { }
result = self . __class__ ( )
memo [ id ( self ) ] = result
for key , value in dict . items ( self ) :
dict . __setitem__ ( result , copy . deepcopy ( key , memo ) , copy . deepcopy ( value , memo ) )
return result
def __getstate__ ( self ) :
obj_dict = self . __dict__ . copy ( )
obj_dict [ '_data' ] = dict ( ( k , self . getlist ( k ) ) for k in self )
return obj_dict
def __setstate__ ( self , obj_dict ) :
data = obj_dict . pop ( '_data' , { } )
for k , v in data . items ( ) :
self . setlist ( k , v )
self . __dict__ . update ( obj_dict )
def get ( self , key , default = None ) :
try :
val = self [ key ]
except KeyError :
return default
if val == [ ] :
return default
return val
def getlist ( self , key , default = None ) :
try :
return super ( MultiValueDict , self ) . __getitem__ ( key )
except KeyError :
if default is None :
return [ ]
return default
def setlist ( self , key , list_ ) :
super ( MultiValueDict , self ) . __setitem__ ( key , list_ )
def setdefault ( self , key , default = None ) :
if key not in self :
self [ key ] = default
return self [ key ]
def setlistdefault ( self , key , default_list = None ) :
if key not in self :
if default_list is None :
default_list = [ ]
self . setlist ( key , default_list )
return self . getlist ( key )
def appendlist ( self , key , value ) :
self . setlistdefault ( key ) . append ( value )
def _iteritems ( self ) :
for key in self :
yield key , self [ key ]
def _iterlists ( self ) :
return six . iteritems ( super ( MultiValueDict , self ) )
def _itervalues ( self ) :
for key in self :
yield self [ key ]
if six . PY3 :
items = _iteritems
lists = _iterlists
values = _itervalues
else :
iteritems = _iteritems
iterlists = _iterlists
itervalues = _itervalues
def items ( self ) :
return list ( self . iteritems ( ) )
def lists ( self ) :
return list ( self . iterlists ( ) )
def values ( self ) :
return list ( self . itervalues ( ) )
def copy ( self ) :
return copy . copy ( self )
def update ( self , * args , ** kwargs ) :
if len ( args ) > 1 :
raise TypeError ( "update expected at most 1 arguments, got %d" % len ( args ) )
if args :
other_dict = args [ 0 ]
if isinstance ( other_dict , MultiValueDict ) :
for key , value_list in other_dict . lists ( ) :
self . setlistdefault ( key ) . extend ( value_list )
else :
try :
for key , value in other_dict . items ( ) :
self . setlistdefault ( key ) . append ( value )
except TypeError :
raise ValueError ( "MultiValueDict.update() takes either a MultiValueDict or dictionary" )
for key , value in six . iteritems ( kwargs ) :
self . setlistdefault ( key ) . append ( value )
def dict ( self ) :
return dict ( ( key , self [ key ] ) for key in self )
class ImmutableList ( tuple ) :
def __new__ ( cls , * args , ** kwargs ) :
if 'warning' in kwargs :
warning = kwargs [ 'warning' ]
del kwargs [ 'warning' ]
else :
warning = 'ImmutableList object is immutable.'
self = tuple . __new__ ( cls , * args , ** kwargs )
self . warning = warning
return self
def complain ( self , * wargs , ** kwargs ) :
if isinstance ( self . warning , Exception ) :
raise self . warning
else :
raise AttributeError ( self . warning )
__delitem__ = complain
__delslice__ = complain
__iadd__ = complain
__imul__ = complain
__setitem__ = complain
__setslice__ = complain
append = complain
extend = complain
insert = complain
pop = complain
remove = complain
sort = complain
reverse = complain
class DictWrapper ( dict ) :
def __init__ ( self , data , func , prefix ) :
super ( DictWrapper , self ) . __init__ ( data )
self . func = func
self . prefix = prefix
def __getitem__ ( self , key ) :
if key . startswith ( self . prefix ) :
use_func = True
key = key [ len ( self . prefix ) : ]
else :
use_func = False
value = super ( DictWrapper , self ) . __getitem__ ( key )
if use_func :
return self . func ( value )
return value
from __future__ import unicode_literals
import re
import time
import calendar
import datetime
from django . utils . dates import MONTHS , MONTHS_3 , MONTHS_ALT , MONTHS_AP , WEEKDAYS , WEEKDAYS_ABBR
from django . utils . translation import ugettext as _
from django . utils . encoding import force_text
from django . utils import six
from django . utils . timezone import get_default_timezone , is_aware , is_naive
re_formatchars = re . compile ( r'(? 11 :
return _ ( 'p.m.' )
return _ ( 'a.m.' )
def A ( self ) :
if self . data . hour > 11 :
return _ ( 'PM' )
return _ ( 'AM' )
def B ( self ) :
raise NotImplementedError ( 'may be implemented in a future release' )
def e ( self ) :
if not self . timezone :
return ""
try :
if hasattr ( self . data , 'tzinfo' ) and self . data . tzinfo :
return self . data . tzinfo . tzname ( self . data ) or ""
except NotImplementedError :
pass
return ""
def f ( self ) :
if self . data . minute == 0 :
return self . g ( )
return '%s:%s' % ( self . g ( ) , self . i ( ) )
def g ( self ) :
if self . data . hour == 0 :
return 12
if self . data . hour > 12 :
return self . data . hour - 12
return self . data . hour
def G ( self ) :
return self . data . hour
def h ( self ) :
return '%02d' % self . g ( )
def H ( self ) :
return '%02d' % self . G ( )
def i ( self ) :
return '%02d' % self . data . minute
def O ( self ) :
if not self . timezone :
return ""
seconds = self . Z ( )
sign = '-' if seconds < 0 else '+'
seconds = abs ( seconds )
return "%s%02d%02d" % ( sign , seconds // 3600 , ( seconds // 60 ) % 60 )
def P ( self ) :
if self . data . minute == 0 and self . data . hour == 0 :
return _ ( 'midnight' )
if self . data . minute == 0 and self . data . hour == 12 :
return _ ( 'noon' )
return '%s %s' % ( self . f ( ) , self . a ( ) )
def s ( self ) :
return '%02d' % self . data . second
def T ( self ) :
if not self . timezone :
return ""
name = self . timezone . tzname ( self . data ) if self . timezone else None
if name is None :
name = self . format ( 'O' )
return six . text_type ( name )
def u ( self ) :
return '%06d' % self . data . microsecond
def Z ( self ) :
if not self . timezone :
return ""
offset = self . timezone . utcoffset ( self . data )
return offset . days * 86400 + offset . seconds
class DateFormat ( TimeFormat ) :
year_days = [ None , 0 , 31 , 59 , 90 , 120 , 151 , 181 , 212 , 243 , 273 , 304 , 334 ]
def b ( self ) :
return MONTHS_3 [ self . data . month ]
def c ( self ) :
return self . data . isoformat ( )
def d ( self ) :
return '%02d' % self . data . day
def D ( self ) :
return WEEKDAYS_ABBR [ self . data . weekday ( ) ]
def E ( self ) :
return MONTHS_ALT [ self . data . month ]
def F ( self ) :
return MONTHS [ self . data . month ]
def I ( self ) :
if self . timezone and self . timezone . dst ( self . data ) :
return '1'
else :
return '0'
def j ( self ) :
return self . data . day
def l ( self ) :
return WEEKDAYS [ self . data . weekday ( ) ]
def L ( self ) :
return calendar . isleap ( self . data . year )
def m ( self ) :
return '%02d' % self . data . month
def M ( self ) :
return MONTHS_3 [ self . data . month ] . title ( )
def n ( self ) :
return self . data . month
def N ( self ) :
return MONTHS_AP [ self . data . month ]
def o ( self ) :
return self . data . isocalendar ( ) [ 0 ]
def r ( self ) :
return self . format ( 'D, j M Y H:i:s O' )
def S ( self ) :
if self . data . day in ( 11 , 12 , 13 ) :
return 'th'
last = self . data . day % 10
if last == 1 :
return 'st'
if last == 2 :
return 'nd'
if last == 3 :
return 'rd'
return 'th'
def t ( self ) :
return '%02d' % calendar . monthrange ( self . data . year , self . data . month ) [ 1 ]
def U ( self ) :
if isinstance ( self . data , datetime . datetime ) and is_aware ( self . data ) :
return int ( calendar . timegm ( self . data . utctimetuple ( ) ) )
else :
return int ( time . mktime ( self . data . timetuple ( ) ) )
def w ( self ) :
return ( self . data . weekday ( ) + 1 ) % 7
def W ( self ) :
week_number = None
jan1_weekday = self . data . replace ( month = 1 , day = 1 ) . weekday ( ) + 1
weekday = self . data . weekday ( ) + 1
day_of_year = self . z ( )
if day_of_year <= ( 8 - jan1_weekday ) and jan1_weekday > 4 :
if jan1_weekday == 5 or ( jan1_weekday == 6 and calendar . isleap ( self . data . year - 1 ) ) :
week_number = 53
else :
week_number = 52
else :
if calendar . isleap ( self . data . year ) :
i = 366
else :
i = 365
if ( i - day_of_year ) < ( 4 - weekday ) :
week_number = 1
else :
j = day_of_year + ( 7 - weekday ) + ( jan1_weekday - 1 )
week_number = j // 7
if jan1_weekday > 4 :
week_number -= 1
return week_number
def y ( self ) :
return six . text_type ( self . data . year ) [ 2 : ]
def Y ( self ) :
return self . data . year
def z ( self ) :
doy = self . year_days [ self . data . month ] + self . data . day
if self . L ( ) and self . data . month > 2 :
doy += 1
return doy
def format ( value , format_string ) :
df = DateFormat ( value )
return df . format ( format_string )
def time_format ( value , format_string ) :
tf = TimeFormat ( value )
return tf . format ( format_string )
import datetime
import re
from django . utils import six
from django . utils . timezone import utc , get_fixed_timezone
date_re = re . compile ( r'(?P\d{4})-(?P\d{1,2})-(?P\d{1,2})$' )
time_re = re . compile ( r'(?P\d{1,2}):(?P\d{1,2})' r'(?::(?P\d{1,2})(?:\.(?P\d{1,6})\d{0,6})?)?' )
datetime_re = re . compile ( r'(?P\d{4})-(?P\d{1,2})-(?P\d{1,2})' r'[T ](?P\d{1,2}):(?P\d{1,2})' r'(?::(?P\d{1,2})(?:\.(?P\d{1,6})\d{0,6})?)?' r'(?PZ|[+-]\d{2}(?::?\d{2})?)?$' )
def parse_date ( value ) :
match = date_re . match ( value )
if match :
kw = dict ( ( k , int ( v ) ) for k , v in six . iteritems ( match . groupdict ( ) ) )
return datetime . date ( ** kw )
def parse_time ( value ) :
match = time_re . match ( value )
if match :
kw = match . groupdict ( )
if kw [ 'microsecond' ] :
kw [ 'microsecond' ] = kw [ 'microsecond' ] . ljust ( 6 , '0' )
kw = dict ( ( k , int ( v ) ) for k , v in six . iteritems ( kw ) if v is not None )
return datetime . time ( ** kw )
def parse_datetime ( value ) :
match = datetime_re . match ( value )
if match :
kw = match . groupdict ( )
if kw [ 'microsecond' ] :
kw [ 'microsecond' ] = kw [ 'microsecond' ] . ljust ( 6 , '0' )
tzinfo = kw . pop ( 'tzinfo' )
if tzinfo == 'Z' :
tzinfo = utc
elif tzinfo is not None :
offset_mins = int ( tzinfo [ - 2 : ] ) if len ( tzinfo ) > 3 else 0
offset = 60 * int ( tzinfo [ 1 : 3 ] ) + offset_mins
if tzinfo [ 0 ] == '-' :
offset = - offset
tzinfo = get_fixed_timezone ( offset )
kw = dict ( ( k , int ( v ) ) for k , v in six . iteritems ( kw ) if v is not None )
kw [ 'tzinfo' ] = tzinfo
return datetime . datetime ( ** kw )
from django . utils . translation import ugettext_lazy as _ , pgettext_lazy
WEEKDAYS = { 0 : _ ( 'Monday' ) , 1 : _ ( 'Tuesday' ) , 2 : _ ( 'Wednesday' ) , 3 : _ ( 'Thursday' ) , 4 : _ ( 'Friday' ) , 5 : _ ( 'Saturday' ) , 6 : _ ( 'Sunday' ) }
WEEKDAYS_ABBR = { 0 : _ ( 'Mon' ) , 1 : _ ( 'Tue' ) , 2 : _ ( 'Wed' ) , 3 : _ ( 'Thu' ) , 4 : _ ( 'Fri' ) , 5 : _ ( 'Sat' ) , 6 : _ ( 'Sun' ) }
WEEKDAYS_REV = { 'monday' : 0 , 'tuesday' : 1 , 'wednesday' : 2 , 'thursday' : 3 , 'friday' : 4 , 'saturday' : 5 , 'sunday' : 6 }
MONTHS = { 1 : _ ( 'January' ) , 2 : _ ( 'February' ) , 3 : _ ( 'March' ) , 4 : _ ( 'April' ) , 5 : _ ( 'May' ) , 6 : _ ( 'June' ) , 7 : _ ( 'July' ) , 8 : _ ( 'August' ) , 9 : _ ( 'September' ) , 10 : _ ( 'October' ) , 11 : _ ( 'November' ) , 12 : _ ( 'December' ) }
MONTHS_3 = { 1 : _ ( 'jan' ) , 2 : _ ( 'feb' ) , 3 : _ ( 'mar' ) , 4 : _ ( 'apr' ) , 5 : _ ( 'may' ) , 6 : _ ( 'jun' ) , 7 : _ ( 'jul' ) , 8 : _ ( 'aug' ) , 9 : _ ( 'sep' ) , 10 : _ ( 'oct' ) , 11 : _ ( 'nov' ) , 12 : _ ( 'dec' ) }
MONTHS_3_REV = { 'jan' : 1 , 'feb' : 2 , 'mar' : 3 , 'apr' : 4 , 'may' : 5 , 'jun' : 6 , 'jul' : 7 , 'aug' : 8 , 'sep' : 9 , 'oct' : 10 , 'nov' : 11 , 'dec' : 12 }
MONTHS_AP = { 1 : pgettext_lazy ( 'abbrev. month' , 'Jan.' ) , 2 : pgettext_lazy ( 'abbrev. month' , 'Feb.' ) , 3 : pgettext_lazy ( 'abbrev. month' , 'March' ) , 4 : pgettext_lazy ( 'abbrev. month' , 'April' ) , 5 : pgettext_lazy ( 'abbrev. month' , 'May' ) , 6 : pgettext_lazy ( 'abbrev. month' , 'June' ) , 7 : pgettext_lazy ( 'abbrev. month' , 'July' ) , 8 : pgettext_lazy ( 'abbrev. month' , 'Aug.' ) , 9 : pgettext_lazy ( 'abbrev. month' , 'Sept.' ) , 10 : pgettext_lazy ( 'abbrev. month' , 'Oct.' ) , 11 : pgettext_lazy ( 'abbrev. month' , 'Nov.' ) , 12 : pgettext_lazy ( 'abbrev. month' , 'Dec.' ) }
MONTHS_ALT = { 1 : pgettext_lazy ( 'alt. month' , 'January' ) , 2 : pgettext_lazy ( 'alt. month' , 'February' ) , 3 : pgettext_lazy ( 'alt. month' , 'March' ) , 4 : pgettext_lazy ( 'alt. month' , 'April' ) , 5 : pgettext_lazy ( 'alt. month' , 'May' ) , 6 : pgettext_lazy ( 'alt. month' , 'June' ) , 7 : pgettext_lazy ( 'alt. month' , 'July' ) , 8 : pgettext_lazy ( 'alt. month' , 'August' ) , 9 : pgettext_lazy ( 'alt. month' , 'September' ) , 10 : pgettext_lazy ( 'alt. month' , 'October' ) , 11 : pgettext_lazy ( 'alt. month' , 'November' ) , 12 : pgettext_lazy ( 'alt. month' , 'December' ) }
from datetime import date as real_date , datetime as real_datetime
import re
import time
class date ( real_date ) :
def strftime ( self , fmt ) :
return strftime ( self , fmt )
class datetime ( real_datetime ) :
def strftime ( self , fmt ) :
return strftime ( self , fmt )
@ classmethod
def combine ( cls , date , time ) :
return cls ( date . year , date . month , date . day , time . hour , time . minute , time . second , time . microsecond , time . tzinfo )
def date ( self ) :
return date ( self . year , self . month , self . day )
def new_date ( d ) :
return date ( d . year , d . month , d . day )
def new_datetime ( d ) :
kw = [ d . year , d . month , d . day ]
if isinstance ( d , real_datetime ) :
kw . extend ( [ d . hour , d . minute , d . second , d . microsecond , d . tzinfo ] )
return datetime ( * kw )
_illegal_formatting = re . compile ( r"((^|[^%])(%%)*%[sy])" )
def _findall ( text , substr ) :
sites = [ ]
i = 0
while 1 :
j = text . find ( substr , i )
if j == - 1 :
break
sites . append ( j )
i = j + 1
return sites
def strftime ( dt , fmt ) :
if dt . year >= 1900 :
return super ( type ( dt ) , dt ) . strftime ( fmt )
illegal_formatting = _illegal_formatting . search ( fmt )
if illegal_formatting :
raise TypeError ( "strftime of dates before 1900 does not handle" + illegal_formatting . group ( 0 ) )
year = dt . year
delta = 2000 - year
off = 6 * ( delta // 100 + delta // 400 )
year = year + off
year = year + ( ( 2000 - year ) // 28 ) * 28
timetuple = dt . timetuple ( )
s1 = time . strftime ( fmt , ( year , ) + timetuple [ 1 : ] )
sites1 = _findall ( s1 , str ( year ) )
s2 = time . strftime ( fmt , ( year + 28 , ) + timetuple [ 1 : ] )
sites2 = _findall ( s2 , str ( year + 28 ) )
sites = [ ]
for site in sites1 :
if site in sites2 :
sites . append ( site )
s = s1
syear = "%04d" % ( dt . year , )
for site in sites :
s = s [ : site ] + syear + s [ site + 4 : ]
return s
def deconstructible ( * args , ** kwargs ) :
path = kwargs . pop ( 'path' , None )
def decorator ( klass ) :
def __new__ ( cls , * args , ** kwargs ) :
obj = super ( klass , cls ) . __new__ ( cls )
obj . _constructor_args = ( args , kwargs )
return obj
def deconstruct ( obj ) :
return ( path or '%s.%s' % ( obj . __class__ . __module__ , obj . __class__ . __name__ ) , obj . _constructor_args [ 0 ] , obj . _constructor_args [ 1 ] , )
klass . __new__ = staticmethod ( __new__ )
klass . deconstruct = deconstruct
return klass
if not args :
return decorator
return decorator ( * args , ** kwargs )
from functools import wraps , update_wrapper , WRAPPER_ASSIGNMENTS
from django . utils import six
class classonlymethod ( classmethod ) :
def __get__ ( self , instance , owner ) :
if instance is not None :
raise AttributeError ( "This method is available only on the view class." )
return super ( classonlymethod , self ) . __get__ ( instance , owner )
def method_decorator ( decorator ) :
def _dec ( func ) :
def _wrapper ( self , * args , ** kwargs ) :
@ decorator
def bound_func ( * args2 , ** kwargs2 ) :
return func . __get__ ( self , type ( self ) ) ( * args2 , ** kwargs2 )
return bound_func ( * args , ** kwargs )
@ decorator
def dummy ( * args , ** kwargs ) :
pass
update_wrapper ( _wrapper , dummy )
update_wrapper ( _wrapper , func )
return _wrapper
update_wrapper ( _dec , decorator , assigned = available_attrs ( decorator ) )
if hasattr ( decorator , '__name__' ) :
_dec . __name__ = 'method_decorator(%s)' % decorator . __name__
else :
_dec . __name__ = 'method_decorator(%s)' % decorator . __class__ . __name__
return _dec
def decorator_from_middleware_with_args ( middleware_class ) :
return make_middleware_decorator ( middleware_class )
def decorator_from_middleware ( middleware_class ) :
return make_middleware_decorator ( middleware_class ) ( )
def available_attrs ( fn ) :
if six . PY3 :
return WRAPPER_ASSIGNMENTS
else :
return tuple ( a for a in WRAPPER_ASSIGNMENTS if hasattr ( fn , a ) )
def make_middleware_decorator ( middleware_class ) :
def _make_decorator ( * m_args , ** m_kwargs ) :
middleware = middleware_class ( * m_args , ** m_kwargs )
def _decorator ( view_func ) :
@ wraps ( view_func , assigned = available_attrs ( view_func ) )
def _wrapped_view ( request , * args , ** kwargs ) :
if hasattr ( middleware , 'process_request' ) :
result = middleware . process_request ( request )
if result is not None :
return result
if hasattr ( middleware , 'process_view' ) :
result = middleware . process_view ( request , view_func , args , kwargs )
if result is not None :
return result
try :
response = view_func ( request , * args , ** kwargs )
except Exception as e :
if hasattr ( middleware , 'process_exception' ) :
result = middleware . process_exception ( request , e )
if result is not None :
return result
raise
if hasattr ( response , 'render' ) and callable ( response . render ) :
if hasattr ( middleware , 'process_template_response' ) :
response = middleware . process_template_response ( request , response )
if hasattr ( middleware , 'process_response' ) :
callback = lambda response : middleware . process_response ( request , response )
response . add_post_render_callback ( callback )
else :
if hasattr ( middleware , 'process_response' ) :
return middleware . process_response ( request , response )
return response
return _wrapped_view
return _decorator
return _make_decorator
import inspect
import warnings
class RemovedInDjango20Warning ( PendingDeprecationWarning ) :
pass
class RemovedInDjango19Warning ( DeprecationWarning ) :
pass
RemovedInNextVersionWarning = RemovedInDjango19Warning
class warn_about_renamed_method ( object ) :
def __init__ ( self , class_name , old_method_name , new_method_name , deprecation_warning ) :
self . class_name = class_name
self . old_method_name = old_method_name
self . new_method_name = new_method_name
self . deprecation_warning = deprecation_warning
def __call__ ( self , f ) :
def wrapped ( * args , ** kwargs ) :
warnings . warn ( "`%s.%s` is deprecated, use `%s` instead." % ( self . class_name , self . old_method_name , self . new_method_name ) , self . deprecation_warning , 2 )
return f ( * args , ** kwargs )
return wrapped
class RenameMethodsBase ( type ) :
renamed_methods = ( )
def __new__ ( cls , name , bases , attrs ) :
new_class = super ( RenameMethodsBase , cls ) . __new__ ( cls , name , bases , attrs )
for base in inspect . getmro ( new_class ) :
class_name = base . __name__
for renamed_method in cls . renamed_methods :
old_method_name = renamed_method [ 0 ]
old_method = base . __dict__ . get ( old_method_name )
new_method_name = renamed_method [ 1 ]
new_method = base . __dict__ . get ( new_method_name )
deprecation_warning = renamed_method [ 2 ]
wrapper = warn_about_renamed_method ( class_name , * renamed_method )
if not new_method and old_method :
warnings . warn ( "`%s.%s` method should be renamed `%s`." % ( class_name , old_method_name , new_method_name ) , deprecation_warning , 2 )
setattr ( base , new_method_name , old_method )
setattr ( base , old_method_name , wrapper ( old_method ) )
if not old_method and new_method :
setattr ( base , old_method_name , wrapper ( new_method ) )
return new_class
import warnings
from django . utils . deprecation import RemovedInDjango19Warning
warnings . warn ( "django.utils.dictconfig will be removed in Django 1.9." , RemovedInDjango19Warning , stacklevel = 2 )
import logging . handlers
import re
import sys
import types
from django . utils import six
IDENTIFIER = re . compile ( '^[a-z_][a-z0-9_]*$' , re . I )
def valid_ident ( s ) :
m = IDENTIFIER . match ( s )
if not m :
raise ValueError ( 'Not a valid Python identifier: %r' % s )
return True
try :
from logging import _checkLevel
except ImportError :
def _checkLevel ( level ) :
if isinstance ( level , int ) :
rv = level
elif str ( level ) == level :
if level not in logging . _levelNames :
raise ValueError ( 'Unknown level: %r' % level )
rv = logging . _levelNames [ level ]
else :
raise TypeError ( 'Level not an integer or a ' 'valid string: %r' % level )
return rv
class ConvertingDict ( dict ) :
def __getitem__ ( self , key ) :
value = dict . __getitem__ ( self , key )
result = self . configurator . convert ( value )
if value is not result :
self [ key ] = result
if type ( result ) in ( ConvertingDict , ConvertingList , ConvertingTuple ) :
result . parent = self
result . key = key
return result
def get ( self , key , default = None ) :
value = dict . get ( self , key , default )
result = self . configurator . convert ( value )
if value is not result :
self [ key ] = result
if type ( result ) in ( ConvertingDict , ConvertingList , ConvertingTuple ) :
result . parent = self
result . key = key
return result
def pop ( self , key , default = None ) :
value = dict . pop ( self , key , default )
result = self . configurator . convert ( value )
if value is not result :
if type ( result ) in ( ConvertingDict , ConvertingList , ConvertingTuple ) :
result . parent = self
result . key = key
return result
class ConvertingList ( list ) :
def __getitem__ ( self , key ) :
value = list . __getitem__ ( self , key )
result = self . configurator . convert ( value )
if value is not result :
self [ key ] = result
if type ( result ) in ( ConvertingDict , ConvertingList , ConvertingTuple ) :
result . parent = self
result . key = key
return result
def pop ( self , idx = - 1 ) :
value = list . pop ( self , idx )
result = self . configurator . convert ( value )
if value is not result :
if type ( result ) in ( ConvertingDict , ConvertingList , ConvertingTuple ) :
result . parent = self
return result
class ConvertingTuple ( tuple ) :
def __getitem__ ( self , key ) :
value = tuple . __getitem__ ( self , key )
result = self . configurator . convert ( value )
if value is not result :
if type ( result ) in ( ConvertingDict , ConvertingList , ConvertingTuple ) :
result . parent = self
result . key = key
return result
class BaseConfigurator ( object ) :
CONVERT_PATTERN = re . compile ( r'^(?P[a-z]+)://(?P.*)$' )
WORD_PATTERN = re . compile ( r'^\s*(\w+)\s*' )
DOT_PATTERN = re . compile ( r'^\.\s*(\w+)\s*' )
INDEX_PATTERN = re . compile ( r'^\[\s*(\w+)\s*\]\s*' )
DIGIT_PATTERN = re . compile ( r'^\d+$' )
value_converters = { 'ext' : 'ext_convert' , 'cfg' : 'cfg_convert' , }
importer = __import__
def __init__ ( self , config ) :
self . config = ConvertingDict ( config )
self . config . configurator = self
def resolve ( self , s ) :
name = s . split ( '.' )
used = name . pop ( 0 )
try :
found = self . importer ( used )
for frag in name :
used += '.' + frag
try :
found = getattr ( found , frag )
except AttributeError :
self . importer ( used )
found = getattr ( found , frag )
return found
except ImportError :
e , tb = sys . exc_info ( ) [ 1 : ]
v = ValueError ( 'Cannot resolve %r: %s' % ( s , e ) )
v . __cause__ , v . __traceback__ = e , tb
raise v
def ext_convert ( self , value ) :
return self . resolve ( value )
def cfg_convert ( self , value ) :
rest = value
m = self . WORD_PATTERN . match ( rest )
if m is None :
raise ValueError ( "Unable to convert %r" % value )
else :
rest = rest [ m . end ( ) : ]
d = self . config [ m . groups ( ) [ 0 ] ]
while rest :
m = self . DOT_PATTERN . match ( rest )
if m :
d = d [ m . groups ( ) [ 0 ] ]
else :
m = self . INDEX_PATTERN . match ( rest )
if m :
idx = m . groups ( ) [ 0 ]
if not self . DIGIT_PATTERN . match ( idx ) :
d = d [ idx ]
else :
try :
n = int ( idx )
d = d [ n ]
except TypeError :
d = d [ idx ]
if m :
rest = rest [ m . end ( ) : ]
else :
raise ValueError ( 'Unable to convert ' '%r at %r' % ( value , rest ) )
return d
def convert ( self , value ) :
if not isinstance ( value , ConvertingDict ) and isinstance ( value , dict ) :
value = ConvertingDict ( value )
value . configurator = self
elif not isinstance ( value , ConvertingList ) and isinstance ( value , list ) :
value = ConvertingList ( value )
value . configurator = self
elif not isinstance ( value , ConvertingTuple ) and isinstance ( value , tuple ) :
value = ConvertingTuple ( value )
value . configurator = self
elif isinstance ( value , six . string_types ) :
m = self . CONVERT_PATTERN . match ( value )
if m :
d = m . groupdict ( )
prefix = d [ 'prefix' ]
converter = self . value_converters . get ( prefix , None )
if converter :
suffix = d [ 'suffix' ]
converter = getattr ( self , converter )
value = converter ( suffix )
return value
def configure_custom ( self , config ) :
c = config . pop ( '()' )
if not hasattr ( c , '__call__' ) and hasattr ( types , 'ClassType' ) and type ( c ) != types . ClassType :
c = self . resolve ( c )
props = config . pop ( '.' , None )
kwargs = dict ( ( k , config [ k ] ) for k in config if valid_ident ( k ) )
result = c ( ** kwargs )
if props :
for name , value in props . items ( ) :
setattr ( result , name , value )
return result
def as_tuple ( self , value ) :
if isinstance ( value , list ) :
value = tuple ( value )
return value
class DictConfigurator ( BaseConfigurator ) :
def configure ( self ) :
config = self . config
if 'version' not in config :
raise ValueError ( "dictionary doesn't specify a version" )
if config [ 'version' ] != 1 :
raise ValueError ( "Unsupported version: %s" % config [ 'version' ] )
incremental = config . pop ( 'incremental' , False )
EMPTY_DICT = { }
logging . _acquireLock ( )
try :
if incremental :
handlers = config . get ( 'handlers' , EMPTY_DICT )
if sys . version_info [ : 2 ] == ( 2 , 7 ) :
for name in handlers :
if name not in logging . _handlers :
raise ValueError ( 'No handler found with ' 'name %r' % name )
else :
try :
handler = logging . _handlers [ name ]
handler_config = handlers [ name ]
level = handler_config . get ( 'level' , None )
if level :
handler . setLevel ( _checkLevel ( level ) )
except StandardError as e :
raise ValueError ( 'Unable to configure handler ' '%r: %s' % ( name , e ) )
loggers = config . get ( 'loggers' , EMPTY_DICT )
for name in loggers :
try :
self . configure_logger ( name , loggers [ name ] , True )
except StandardError as e :
raise ValueError ( 'Unable to configure logger ' '%r: %s' % ( name , e ) )
root = config . get ( 'root' , None )
if root :
try :
self . configure_root ( root , True )
except StandardError as e :
raise ValueError ( 'Unable to configure root ' 'logger: %s' % e )
else :
disable_existing = config . pop ( 'disable_existing_loggers' , True )
logging . _handlers . clear ( )
del logging . _handlerList [ : ]
formatters = config . get ( 'formatters' , EMPTY_DICT )
for name in formatters :
try :
formatters [ name ] = self . configure_formatter ( formatters [ name ] )
except StandardError as e :
raise ValueError ( 'Unable to configure ' 'formatter %r: %s' % ( name , e ) )
filters = config . get ( 'filters' , EMPTY_DICT )
for name in filters :
try :
filters [ name ] = self . configure_filter ( filters [ name ] )
except StandardError as e :
raise ValueError ( 'Unable to configure ' 'filter %r: %s' % ( name , e ) )
handlers = config . get ( 'handlers' , EMPTY_DICT )
for name in sorted ( handlers ) :
try :
handler = self . configure_handler ( handlers [ name ] )
handler . name = name
handlers [ name ] = handler
except StandardError as e :
raise ValueError ( 'Unable to configure handler ' '%r: %s' % ( name , e ) )
root = logging . root
existing = list ( root . manager . loggerDict )
existing . sort ( )
child_loggers = [ ]
loggers = config . get ( 'loggers' , EMPTY_DICT )
for name in loggers :
if name in existing :
i = existing . index ( name )
prefixed = name + "."
pflen = len ( prefixed )
num_existing = len ( existing )
i = i + 1
while ( i < num_existing ) and ( existing [ i ] [ : pflen ] == prefixed ) :
child_loggers . append ( existing [ i ] )
i = i + 1
existing . remove ( name )
try :
self . configure_logger ( name , loggers [ name ] )
except StandardError as e :
raise ValueError ( 'Unable to configure logger ' '%r: %s' % ( name , e ) )
for log in existing :
logger = root . manager . loggerDict [ log ]
if log in child_loggers :
logger . level = logging . NOTSET
logger . handlers = [ ]
logger . propagate = True
elif disable_existing :
logger . disabled = True
root = config . get ( 'root' , None )
if root :
try :
self . configure_root ( root )
except StandardError as e :
raise ValueError ( 'Unable to configure root ' 'logger: %s' % e )
finally :
logging . _releaseLock ( )
def configure_formatter ( self , config ) :
if '()' in config :
factory = config [ '()' ]
try :
result = self . configure_custom ( config )
except TypeError as te :
if "'format'" not in str ( te ) :
raise
config [ 'fmt' ] = config . pop ( 'format' )
config [ '()' ] = factory
result = self . configure_custom ( config )
else :
fmt = config . get ( 'format' , None )
dfmt = config . get ( 'datefmt' , None )
result = logging . Formatter ( fmt , dfmt )
return result
def configure_filter ( self , config ) :
if '()' in config :
result = self . configure_custom ( config )
else :
name = config . get ( 'name' , '' )
result = logging . Filter ( name )
return result
def add_filters ( self , filterer , filters ) :
for f in filters :
try :
filterer . addFilter ( self . config [ 'filters' ] [ f ] )
except StandardError as e :
raise ValueError ( 'Unable to add filter %r: %s' % ( f , e ) )
def configure_handler ( self , config ) :
formatter = config . pop ( 'formatter' , None )
if formatter :
try :
formatter = self . config [ 'formatters' ] [ formatter ]
except StandardError as e :
raise ValueError ( 'Unable to set formatter ' '%r: %s' % ( formatter , e ) )
level = config . pop ( 'level' , None )
filters = config . pop ( 'filters' , None )
if '()' in config :
c = config . pop ( '()' )
if not hasattr ( c , '__call__' ) and hasattr ( types , 'ClassType' ) and type ( c ) != types . ClassType :
c = self . resolve ( c )
factory = c
else :
klass = self . resolve ( config . pop ( 'class' ) )
if issubclass ( klass , logging . handlers . MemoryHandler ) and 'target' in config :
try :
config [ 'target' ] = self . config [ 'handlers' ] [ config [ 'target' ] ]
except StandardError as e :
raise ValueError ( 'Unable to set target handler ' '%r: %s' % ( config [ 'target' ] , e ) )
elif issubclass ( klass , logging . handlers . SMTPHandler ) and 'mailhost' in config :
config [ 'mailhost' ] = self . as_tuple ( config [ 'mailhost' ] )
elif issubclass ( klass , logging . handlers . SysLogHandler ) and 'address' in config :
config [ 'address' ] = self . as_tuple ( config [ 'address' ] )
factory = klass
kwargs = dict ( ( k , config [ k ] ) for k in config if valid_ident ( k ) )
try :
result = factory ( ** kwargs )
except TypeError as te :
if "'stream'" not in str ( te ) :
raise
kwargs [ 'strm' ] = kwargs . pop ( 'stream' )
result = factory ( ** kwargs )
if formatter :
result . setFormatter ( formatter )
if level is not None :
result . setLevel ( _checkLevel ( level ) )
if filters :
self . add_filters ( result , filters )
return result
def add_handlers ( self , logger , handlers ) :
for h in handlers :
try :
logger . addHandler ( self . config [ 'handlers' ] [ h ] )
except StandardError as e :
raise ValueError ( 'Unable to add handler %r: %s' % ( h , e ) )
def common_logger_config ( self , logger , config , incremental = False ) :
level = config . get ( 'level' , None )
if level is not None :
logger . setLevel ( _checkLevel ( level ) )
if not incremental :
for h in logger . handlers [ : ] :
logger . removeHandler ( h )
handlers = config . get ( 'handlers' , None )
if handlers :
self . add_handlers ( logger , handlers )
filters = config . get ( 'filters' , None )
if filters :
self . add_filters ( logger , filters )
def configure_logger ( self , name , config , incremental = False ) :
logger = logging . getLogger ( name )
self . common_logger_config ( logger , config , incremental )
propagate = config . get ( 'propagate' , None )
if propagate is not None :
logger . propagate = propagate
def configure_root ( self , config , incremental = False ) :
root = logging . getLogger ( )
self . common_logger_config ( root , config , incremental )
dictConfigClass = DictConfigurator
def dictConfig ( config ) :
dictConfigClass ( config ) . configure ( )
from __future__ import unicode_literals
import codecs
import datetime
from decimal import Decimal
import locale
from django . utils . functional import Promise
from django . utils import six
from django . utils . six . moves . urllib . parse import quote
class DjangoUnicodeDecodeError ( UnicodeDecodeError ) :
def __init__ ( self , obj , * args ) :
self . obj = obj
UnicodeDecodeError . __init__ ( self , * args )
def __str__ ( self ) :
original = UnicodeDecodeError . __str__ ( self )
return '%s. You passed in %r (%s)' % ( original , self . obj , type ( self . obj ) )
def python_2_unicode_compatible ( klass ) :
if six . PY2 :
if '__str__' not in klass . __dict__ :
raise ValueError ( "@python_2_unicode_compatible cannot be applied " "to %s because it doesn't define __str__()." % klass . __name__ )
klass . __unicode__ = klass . __str__
klass . __str__ = lambda self : self . __unicode__ ( ) . encode ( 'utf-8' )
return klass
def smart_text ( s , encoding = 'utf-8' , strings_only = False , errors = 'strict' ) :
if isinstance ( s , Promise ) :
return s
return force_text ( s , encoding , strings_only , errors )
_PROTECTED_TYPES = six . integer_types + ( type ( None ) , float , Decimal , datetime . datetime , datetime . date , datetime . time )
def is_protected_type ( obj ) :
return isinstance ( obj , _PROTECTED_TYPES )
def force_text ( s , encoding = 'utf-8' , strings_only = False , errors = 'strict' ) :
if isinstance ( s , six . text_type ) :
return s
if strings_only and is_protected_type ( s ) :
return s
try :
if not isinstance ( s , six . string_types ) :
if six . PY3 :
if isinstance ( s , bytes ) :
s = six . text_type ( s , encoding , errors )
else :
s = six . text_type ( s )
elif hasattr ( s , '__unicode__' ) :
s = six . text_type ( s )
else :
s = six . text_type ( bytes ( s ) , encoding , errors )
else :
s = s . decode ( encoding , errors )
except UnicodeDecodeError as e :
if not isinstance ( s , Exception ) :
raise DjangoUnicodeDecodeError ( s , * e . args )
else :
s = ' ' . join ( [ force_text ( arg , encoding , strings_only , errors ) for arg in s ] )
return s
def smart_bytes ( s , encoding = 'utf-8' , strings_only = False , errors = 'strict' ) :
if isinstance ( s , Promise ) :
return s
return force_bytes ( s , encoding , strings_only , errors )
def force_bytes ( s , encoding = 'utf-8' , strings_only = False , errors = 'strict' ) :
if isinstance ( s , bytes ) :
if encoding == 'utf-8' :
return s
else :
return s . decode ( 'utf-8' , errors ) . encode ( encoding , errors )
if strings_only and is_protected_type ( s ) :
return s
if isinstance ( s , six . memoryview ) :
return bytes ( s )
if isinstance ( s , Promise ) :
return six . text_type ( s ) . encode ( encoding , errors )
if not isinstance ( s , six . string_types ) :
try :
if six . PY3 :
return six . text_type ( s ) . encode ( encoding )
else :
return bytes ( s )
except UnicodeEncodeError :
if isinstance ( s , Exception ) :
return b' ' . join ( [ force_bytes ( arg , encoding , strings_only , errors ) for arg in s ] )
return six . text_type ( s ) . encode ( encoding , errors )
else :
return s . encode ( encoding , errors )
if six . PY3 :
smart_str = smart_text
force_str = force_text
else :
smart_str = smart_bytes
force_str = force_bytes
smart_unicode = smart_text
force_unicode = force_text
def iri_to_uri ( iri ) :
if iri is None :
return iri
return quote ( force_bytes ( iri ) , safe = b"/#%[]=:;$&()+,!?*@'~" )
def filepath_to_uri ( path ) :
if path is None :
return path
return quote ( force_bytes ( path ) . replace ( b"\\" , b"/" ) , safe = b"/~!*()'" )
def get_system_encoding ( ) :
try :
encoding = locale . getdefaultlocale ( ) [ 1 ] or 'ascii'
codecs . lookup ( encoding )
except Exception :
encoding = 'ascii'
return encoding
DEFAULT_LOCALE_ENCODING = get_system_encoding ( )
from __future__ import unicode_literals
import datetime
from django . utils . xmlutils import SimplerXMLGenerator
from django . utils . encoding import force_text , iri_to_uri
from django . utils import datetime_safe
from django . utils import six
from django . utils . six import StringIO
from django . utils . six . moves . urllib . parse import urlparse
from django . utils . timezone import is_aware
def rfc2822_date ( date ) :
months = ( 'Jan' , 'Feb' , 'Mar' , 'Apr' , 'May' , 'Jun' , 'Jul' , 'Aug' , 'Sep' , 'Oct' , 'Nov' , 'Dec' , )
days = ( 'Mon' , 'Tue' , 'Wed' , 'Thu' , 'Fri' , 'Sat' , 'Sun' )
date = datetime_safe . new_datetime ( date )
dow = days [ date . weekday ( ) ]
month = months [ date . month - 1 ]
time_str = date . strftime ( '%s, %%d %s %%Y %%H:%%M:%%S ' % ( dow , month ) )
if six . PY2 :
time_str = time_str . decode ( 'utf-8' )
if is_aware ( date ) :
offset = date . tzinfo . utcoffset ( date )
timezone = ( offset . days * 24 * 60 ) + ( offset . seconds // 60 )
hour , minute = divmod ( timezone , 60 )
return time_str + '%+03d%02d' % ( hour , minute )
else :
return time_str + '-0000'
def rfc3339_date ( date ) :
date = datetime_safe . new_datetime ( date )
time_str = date . strftime ( '%Y-%m-%dT%H:%M:%S' )
if six . PY2 :
time_str = time_str . decode ( 'utf-8' )
if is_aware ( date ) :
offset = date . tzinfo . utcoffset ( date )
timezone = ( offset . days * 24 * 60 ) + ( offset . seconds // 60 )
hour , minute = divmod ( timezone , 60 )
return time_str + '%+03d:%02d' % ( hour , minute )
else :
return time_str + 'Z'
def get_tag_uri ( url , date ) :
bits = urlparse ( url )
d = ''
if date is not None :
d = ',%s' % datetime_safe . new_datetime ( date ) . strftime ( '%Y-%m-%d' )
return 'tag:%s%s:%s/%s' % ( bits . hostname , d , bits . path , bits . fragment )
class SyndicationFeed ( object ) :
def __init__ ( self , title , link , description , language = None , author_email = None , author_name = None , author_link = None , subtitle = None , categories = None , feed_url = None , feed_copyright = None , feed_guid = None , ttl = None , ** kwargs ) :
to_unicode = lambda s : force_text ( s , strings_only = True )
if categories :
categories = [ force_text ( c ) for c in categories ]
if ttl is not None :
ttl = force_text ( ttl )
self . feed = { 'title' : to_unicode ( title ) , 'link' : iri_to_uri ( link ) , 'description' : to_unicode ( description ) , 'language' : to_unicode ( language ) , 'author_email' : to_unicode ( author_email ) , 'author_name' : to_unicode ( author_name ) , 'author_link' : iri_to_uri ( author_link ) , 'subtitle' : to_unicode ( subtitle ) , 'categories' : categories or ( ) , 'feed_url' : iri_to_uri ( feed_url ) , 'feed_copyright' : to_unicode ( feed_copyright ) , 'id' : feed_guid or link , 'ttl' : ttl , }
self . feed . update ( kwargs )
self . items = [ ]
def add_item ( self , title , link , description , author_email = None , author_name = None , author_link = None , pubdate = None , comments = None , unique_id = None , unique_id_is_permalink = None , enclosure = None , categories = ( ) , item_copyright = None , ttl = None , updateddate = None , ** kwargs ) :
to_unicode = lambda s : force_text ( s , strings_only = True )
if categories :
categories = [ ( c ) for c in categories ]
if ttl is not None :
ttl = force_text ( ttl )
item = { 'title' : to_unicode ( title ) , 'link' : iri_to_uri ( link ) , 'description' : to_unicode ( description ) , 'author_email' : to_unicode ( author_email ) , 'author_name' : to_unicode ( author_name ) , 'author_link' : iri_to_uri ( author_link ) , 'pubdate' : pubdate , 'updateddate' : updateddate , 'comments' : to_unicode ( comments ) , 'unique_id' : to_unicode ( unique_id ) , 'unique_id_is_permalink' : unique_id_is_permalink , 'enclosure' : enclosure , 'categories' : categories or ( ) , 'item_copyright' : to_unicode ( item_copyright ) , 'ttl' : ttl , }
item . update ( kwargs )
self . items . append ( item )
def num_items ( self ) :
return len ( self . items )
def root_attributes ( self ) :
return { }
def add_root_elements ( self , handler ) :
pass
def item_attributes ( self , item ) :
return { }
def add_item_elements ( self , handler , item ) :
pass
def write ( self , outfile , encoding ) :
raise NotImplementedError ( 'subclasses of SyndicationFeed must provide a write() method' )
def writeString ( self , encoding ) :
s = StringIO ( )
self . write ( s , encoding )
return s . getvalue ( )
def latest_post_date ( self ) :
latest_date = None
date_keys = ( 'updateddate' , 'pubdate' )
for item in self . items :
for date_key in date_keys :
item_date = item . get ( date_key )
if item_date :
if latest_date is None or item_date > latest_date :
latest_date = item_date
return latest_date or datetime . datetime . now ( )
class Enclosure ( object ) :
def __init__ ( self , url , length , mime_type ) :
self . length , self . mime_type = length , mime_type
self . url = iri_to_uri ( url )
class RssFeed ( SyndicationFeed ) :
mime_type = 'application/rss+xml; charset=utf-8'
def write ( self , outfile , encoding ) :
handler = SimplerXMLGenerator ( outfile , encoding )
handler . startDocument ( )
handler . startElement ( "rss" , self . rss_attributes ( ) )
handler . startElement ( "channel" , self . root_attributes ( ) )
self . add_root_elements ( handler )
self . write_items ( handler )
self . endChannelElement ( handler )
handler . endElement ( "rss" )
def rss_attributes ( self ) :
return { "version" : self . _version , "xmlns:atom" : "http://www.w3.org/2005/Atom" }
def write_items ( self , handler ) :
for item in self . items :
handler . startElement ( 'item' , self . item_attributes ( item ) )
self . add_item_elements ( handler , item )
handler . endElement ( "item" )
def add_root_elements ( self , handler ) :
handler . addQuickElement ( "title" , self . feed [ 'title' ] )
handler . addQuickElement ( "link" , self . feed [ 'link' ] )
handler . addQuickElement ( "description" , self . feed [ 'description' ] )
if self . feed [ 'feed_url' ] is not None :
handler . addQuickElement ( "atom:link" , None , { "rel" : "self" , "href" : self . feed [ 'feed_url' ] } )
if self . feed [ 'language' ] is not None :
handler . addQuickElement ( "language" , self . feed [ 'language' ] )
for cat in self . feed [ 'categories' ] :
handler . addQuickElement ( "category" , cat )
if self . feed [ 'feed_copyright' ] is not None :
handler . addQuickElement ( "copyright" , self . feed [ 'feed_copyright' ] )
handler . addQuickElement ( "lastBuildDate" , rfc2822_date ( self . latest_post_date ( ) ) )
if self . feed [ 'ttl' ] is not None :
handler . addQuickElement ( "ttl" , self . feed [ 'ttl' ] )
def endChannelElement ( self , handler ) :
handler . endElement ( "channel" )
class RssUserland091Feed ( RssFeed ) :
_version = "0.91"
def add_item_elements ( self , handler , item ) :
handler . addQuickElement ( "title" , item [ 'title' ] )
handler . addQuickElement ( "link" , item [ 'link' ] )
if item [ 'description' ] is not None :
handler . addQuickElement ( "description" , item [ 'description' ] )
class Rss201rev2Feed ( RssFeed ) :
_version = "2.0"
def add_item_elements ( self , handler , item ) :
handler . addQuickElement ( "title" , item [ 'title' ] )
handler . addQuickElement ( "link" , item [ 'link' ] )
if item [ 'description' ] is not None :
handler . addQuickElement ( "description" , item [ 'description' ] )
if item [ "author_name" ] and item [ "author_email" ] :
handler . addQuickElement ( "author" , "%s (%s)" % ( item [ 'author_email' ] , item [ 'author_name' ] ) )
elif item [ "author_email" ] :
handler . addQuickElement ( "author" , item [ "author_email" ] )
elif item [ "author_name" ] :
handler . addQuickElement ( "dc:creator" , item [ "author_name" ] , { "xmlns:dc" : "http://purl.org/dc/elements/1.1/" } )
if item [ 'pubdate' ] is not None :
handler . addQuickElement ( "pubDate" , rfc2822_date ( item [ 'pubdate' ] ) )
if item [ 'comments' ] is not None :
handler . addQuickElement ( "comments" , item [ 'comments' ] )
if item [ 'unique_id' ] is not None :
guid_attrs = { }
if isinstance ( item . get ( 'unique_id_is_permalink' ) , bool ) :
guid_attrs [ 'isPermaLink' ] = str ( item [ 'unique_id_is_permalink' ] ) . lower ( )
handler . addQuickElement ( "guid" , item [ 'unique_id' ] , guid_attrs )
if item [ 'ttl' ] is not None :
handler . addQuickElement ( "ttl" , item [ 'ttl' ] )
if item [ 'enclosure' ] is not None :
handler . addQuickElement ( "enclosure" , '' , { "url" : item [ 'enclosure' ] . url , "length" : item [ 'enclosure' ] . length , "type" : item [ 'enclosure' ] . mime_type } )
for cat in item [ 'categories' ] :
handler . addQuickElement ( "category" , cat )
class Atom1Feed ( SyndicationFeed ) :
mime_type = 'application/atom+xml; charset=utf-8'
ns = "http://www.w3.org/2005/Atom"
def write ( self , outfile , encoding ) :
handler = SimplerXMLGenerator ( outfile , encoding )
handler . startDocument ( )
handler . startElement ( 'feed' , self . root_attributes ( ) )
self . add_root_elements ( handler )
self . write_items ( handler )
handler . endElement ( "feed" )
def root_attributes ( self ) :
if self . feed [ 'language' ] is not None :
return { "xmlns" : self . ns , "xml:lang" : self . feed [ 'language' ] }
else :
return { "xmlns" : self . ns }
def add_root_elements ( self , handler ) :
handler . addQuickElement ( "title" , self . feed [ 'title' ] )
handler . addQuickElement ( "link" , "" , { "rel" : "alternate" , "href" : self . feed [ 'link' ] } )
if self . feed [ 'feed_url' ] is not None :
handler . addQuickElement ( "link" , "" , { "rel" : "self" , "href" : self . feed [ 'feed_url' ] } )
handler . addQuickElement ( "id" , self . feed [ 'id' ] )
handler . addQuickElement ( "updated" , rfc3339_date ( self . latest_post_date ( ) ) )
if self . feed [ 'author_name' ] is not None :
handler . startElement ( "author" , { } )
handler . addQuickElement ( "name" , self . feed [ 'author_name' ] )
if self . feed [ 'author_email' ] is not None :
handler . addQuickElement ( "email" , self . feed [ 'author_email' ] )
if self . feed [ 'author_link' ] is not None :
handler . addQuickElement ( "uri" , self . feed [ 'author_link' ] )
handler . endElement ( "author" )
if self . feed [ 'subtitle' ] is not None :
handler . addQuickElement ( "subtitle" , self . feed [ 'subtitle' ] )
for cat in self . feed [ 'categories' ] :
handler . addQuickElement ( "category" , "" , { "term" : cat } )
if self . feed [ 'feed_copyright' ] is not None :
handler . addQuickElement ( "rights" , self . feed [ 'feed_copyright' ] )
def write_items ( self , handler ) :
for item in self . items :
handler . startElement ( "entry" , self . item_attributes ( item ) )
self . add_item_elements ( handler , item )
handler . endElement ( "entry" )
def add_item_elements ( self , handler , item ) :
handler . addQuickElement ( "title" , item [ 'title' ] )
handler . addQuickElement ( "link" , "" , { "href" : item [ 'link' ] , "rel" : "alternate" } )
if item [ 'pubdate' ] is not None :
handler . addQuickElement ( 'published' , rfc3339_date ( item [ 'pubdate' ] ) )
if item [ 'updateddate' ] is not None :
handler . addQuickElement ( 'updated' , rfc3339_date ( item [ 'updateddate' ] ) )
if item [ 'author_name' ] is not None :
handler . startElement ( "author" , { } )
handler . addQuickElement ( "name" , item [ 'author_name' ] )
if item [ 'author_email' ] is not None :
handler . addQuickElement ( "email" , item [ 'author_email' ] )
if item [ 'author_link' ] is not None :
handler . addQuickElement ( "uri" , item [ 'author_link' ] )
handler . endElement ( "author" )
if item [ 'unique_id' ] is not None :
unique_id = item [ 'unique_id' ]
else :
unique_id = get_tag_uri ( item [ 'link' ] , item [ 'pubdate' ] )
handler . addQuickElement ( "id" , unique_id )
if item [ 'description' ] is not None :
handler . addQuickElement ( "summary" , item [ 'description' ] , { "type" : "html" } )
if item [ 'enclosure' ] is not None :
handler . addQuickElement ( "link" , '' , { "rel" : "enclosure" , "href" : item [ 'enclosure' ] . url , "length" : item [ 'enclosure' ] . length , "type" : item [ 'enclosure' ] . mime_type } )
for cat in item [ 'categories' ] :
handler . addQuickElement ( "category" , "" , { "term" : cat } )
if item [ 'item_copyright' ] is not None :
handler . addQuickElement ( "rights" , item [ 'item_copyright' ] )
DefaultFeed = Rss201rev2Feed
from __future__ import absolute_import
import decimal
import datetime
from importlib import import_module
import unicodedata
from django . conf import settings
from django . utils import dateformat , numberformat , datetime_safe
from django . utils . encoding import force_str
from django . utils . functional import lazy
from django . utils . safestring import mark_safe
from django . utils import six
from django . utils . translation import get_language , to_locale , check_for_language
_format_cache = { }
_format_modules_cache = { }
ISO_INPUT_FORMATS = { 'DATE_INPUT_FORMATS' : ( '%Y-%m-%d' , ) , 'TIME_INPUT_FORMATS' : ( '%H:%M:%S' , '%H:%M:%S.%f' , '%H:%M' ) , 'DATETIME_INPUT_FORMATS' : ( '%Y-%m-%d %H:%M:%S' , '%Y-%m-%d %H:%M:%S.%f' , '%Y-%m-%d %H:%M' , '%Y-%m-%d' ) , }
def reset_format_cache ( ) :
global _format_cache , _format_modules_cache
_format_cache = { }
_format_modules_cache = { }
def iter_format_modules ( lang , format_module_path = None ) :
if not check_for_language ( lang ) :
return
if format_module_path is None :
format_module_path = settings . FORMAT_MODULE_PATH
format_locations = [ ]
if format_module_path :
if isinstance ( format_module_path , six . string_types ) :
format_module_path = [ format_module_path ]
for path in format_module_path :
format_locations . append ( path + '.%s' )
format_locations . append ( 'django.conf.locale.%s' )
locale = to_locale ( lang )
locales = [ locale ]
if '_' in locale :
locales . append ( locale . split ( '_' ) [ 0 ] )
for location in format_locations :
for loc in locales :
try :
yield import_module ( '%s.formats' % ( location % loc ) )
except ImportError :
pass
def get_format_modules ( lang = None , reverse = False ) :
if lang is None :
lang = get_language ( )
modules = _format_modules_cache . setdefault ( lang , list ( iter_format_modules ( lang , settings . FORMAT_MODULE_PATH ) ) )
if reverse :
return list ( reversed ( modules ) )
return modules
def get_format ( format_type , lang = None , use_l10n = None ) :
format_type = force_str ( format_type )
if use_l10n or ( use_l10n is None and settings . USE_L10N ) :
if lang is None :
lang = get_language ( )
cache_key = ( format_type , lang )
try :
cached = _format_cache [ cache_key ]
if cached is not None :
return cached
else :
return getattr ( settings , format_type )
except KeyError :
for module in get_format_modules ( lang ) :
try :
val = getattr ( module , format_type )
for iso_input in ISO_INPUT_FORMATS . get ( format_type , ( ) ) :
if iso_input not in val :
if isinstance ( val , tuple ) :
val = list ( val )
val . append ( iso_input )
_format_cache [ cache_key ] = val
return val
except AttributeError :
pass
_format_cache [ cache_key ] = None
return getattr ( settings , format_type )
get_format_lazy = lazy ( get_format , six . text_type , list , tuple )
def date_format ( value , format = None , use_l10n = None ) :
return dateformat . format ( value , get_format ( format or 'DATE_FORMAT' , use_l10n = use_l10n ) )
def time_format ( value , format = None , use_l10n = None ) :
return dateformat . time_format ( value , get_format ( format or 'TIME_FORMAT' , use_l10n = use_l10n ) )
def number_format ( value , decimal_pos = None , use_l10n = None , force_grouping = False ) :
if use_l10n or ( use_l10n is None and settings . USE_L10N ) :
lang = get_language ( )
else :
lang = None
return numberformat . format ( value , get_format ( 'DECIMAL_SEPARATOR' , lang , use_l10n = use_l10n ) , decimal_pos , get_format ( 'NUMBER_GROUPING' , lang , use_l10n = use_l10n ) , get_format ( 'THOUSAND_SEPARATOR' , lang , use_l10n = use_l10n ) , force_grouping = force_grouping )
def localize ( value , use_l10n = None ) :
if isinstance ( value , bool ) :
return mark_safe ( six . text_type ( value ) )
elif isinstance ( value , ( decimal . Decimal , float ) + six . integer_types ) :
return number_format ( value , use_l10n = use_l10n )
elif isinstance ( value , datetime . datetime ) :
return date_format ( value , 'DATETIME_FORMAT' , use_l10n = use_l10n )
elif isinstance ( value , datetime . date ) :
return date_format ( value , use_l10n = use_l10n )
elif isinstance ( value , datetime . time ) :
return time_format ( value , 'TIME_FORMAT' , use_l10n = use_l10n )
else :
return value
def localize_input ( value , default = None ) :
if isinstance ( value , ( decimal . Decimal , float ) + six . integer_types ) :
return number_format ( value )
elif isinstance ( value , datetime . datetime ) :
value = datetime_safe . new_datetime ( value )
format = force_str ( default or get_format ( 'DATETIME_INPUT_FORMATS' ) [ 0 ] )
return value . strftime ( format )
elif isinstance ( value , datetime . date ) :
value = datetime_safe . new_date ( value )
format = force_str ( default or get_format ( 'DATE_INPUT_FORMATS' ) [ 0 ] )
return value . strftime ( format )
elif isinstance ( value , datetime . time ) :
format = force_str ( default or get_format ( 'TIME_INPUT_FORMATS' ) [ 0 ] )
return value . strftime ( format )
return value
def sanitize_separators ( value ) :
if settings . USE_L10N and isinstance ( value , six . string_types ) :
parts = [ ]
decimal_separator = get_format ( 'DECIMAL_SEPARATOR' )
if decimal_separator in value :
value , decimals = value . split ( decimal_separator , 1 )
parts . append ( decimals )
if settings . USE_THOUSAND_SEPARATOR :
thousand_sep = get_format ( 'THOUSAND_SEPARATOR' )
for replacement in set ( [ thousand_sep , unicodedata . normalize ( 'NFKD' , thousand_sep ) ] ) :
value = value . replace ( replacement , '' )
parts . append ( value )
value = '.' . join ( reversed ( parts ) )
return value
import copy
import operator
from functools import wraps
import sys
import warnings
from django . utils import six
from django . utils . deprecation import RemovedInDjango19Warning
from django . utils . six . moves import copyreg
def curry ( _curried_func , * args , ** kwargs ) :
def _curried ( * moreargs , ** morekwargs ) :
return _curried_func ( * ( args + moreargs ) , ** dict ( kwargs , ** morekwargs ) )
return _curried
def memoize ( func , cache , num_args ) :
warnings . warn ( "memoize wrapper is deprecated and will be removed in " "Django 1.9. Use django.utils.lru_cache instead." , RemovedInDjango19Warning , stacklevel = 2 )
@ wraps ( func )
def wrapper ( * args ) :
mem_args = args [ : num_args ]
if mem_args in cache :
return cache [ mem_args ]
result = func ( * args )
cache [ mem_args ] = result
return result
return wrapper
class cached_property ( object ) :
def __init__ ( self , func , name = None ) :
self . func = func
self . name = name or func . __name__
def __get__ ( self , instance , type = None ) :
if instance is None :
return self
res = instance . __dict__ [ self . name ] = self . func ( instance )
return res
class Promise ( object ) :
pass
def lazy ( func , * resultclasses ) :
@ total_ordering
class __proxy__ ( Promise ) :
__dispatch = None
def __init__ ( self , args , kw ) :
self . __args = args
self . __kw = kw
if self . __dispatch is None :
self . __prepare_class__ ( )
def __reduce__ ( self ) :
return ( _lazy_proxy_unpickle , ( func , self . __args , self . __kw ) + resultclasses )
@ classmethod
def __prepare_class__ ( cls ) :
cls . __dispatch = { }
for resultclass in resultclasses :
cls . __dispatch [ resultclass ] = { }
for type_ in reversed ( resultclass . mro ( ) ) :
for ( k , v ) in type_ . __dict__ . items ( ) :
meth = cls . __promise__ ( resultclass , k , v )
if hasattr ( cls , k ) :
continue
setattr ( cls , k , meth )
cls . _delegate_bytes = bytes in resultclasses
cls . _delegate_text = six . text_type in resultclasses
assert not ( cls . _delegate_bytes and cls . _delegate_text ) , "Cannot call lazy() with both bytes and text return types."
if cls . _delegate_text :
if six . PY3 :
cls . __str__ = cls . __text_cast
else :
cls . __unicode__ = cls . __text_cast
elif cls . _delegate_bytes :
if six . PY3 :
cls . __bytes__ = cls . __bytes_cast
else :
cls . __str__ = cls . __bytes_cast
@ classmethod
def __promise__ ( cls , klass , funcname , method ) :
def __wrapper__ ( self , * args , ** kw ) :
res = func ( * self . __args , ** self . __kw )
for t in type ( res ) . mro ( ) :
if t in self . __dispatch :
return self . __dispatch [ t ] [ funcname ] ( res , * args , ** kw )
raise TypeError ( "Lazy object returned unexpected type." )
if klass not in cls . __dispatch :
cls . __dispatch [ klass ] = { }
cls . __dispatch [ klass ] [ funcname ] = method
return __wrapper__
def __text_cast ( self ) :
return func ( * self . __args , ** self . __kw )
def __bytes_cast ( self ) :
return bytes ( func ( * self . __args , ** self . __kw ) )
def __cast ( self ) :
if self . _delegate_bytes :
return self . __bytes_cast ( )
elif self . _delegate_text :
return self . __text_cast ( )
else :
return func ( * self . __args , ** self . __kw )
def __ne__ ( self , other ) :
if isinstance ( other , Promise ) :
other = other . __cast ( )
return self . __cast ( ) != other
def __eq__ ( self , other ) :
if isinstance ( other , Promise ) :
other = other . __cast ( )
return self . __cast ( ) == other
def __lt__ ( self , other ) :
if isinstance ( other , Promise ) :
other = other . __cast ( )
return self . __cast ( ) < other
def __hash__ ( self ) :
return hash ( self . __cast ( ) )
def __mod__ ( self , rhs ) :
if self . _delegate_bytes and six . PY2 :
return bytes ( self ) % rhs
elif self . _delegate_text :
return six . text_type ( self ) % rhs
return self . __cast ( ) % rhs
def __deepcopy__ ( self , memo ) :
memo [ id ( self ) ] = self
return self
@ wraps ( func )
def __wrapper__ ( * args , ** kw ) :
return __proxy__ ( args , kw )
return __wrapper__
def _lazy_proxy_unpickle ( func , args , kwargs , * resultclasses ) :
return lazy ( func , * resultclasses ) ( * args , ** kwargs )
def allow_lazy ( func , * resultclasses ) :
@ wraps ( func )
def wrapper ( * args , ** kwargs ) :
for arg in list ( args ) + list ( six . itervalues ( kwargs ) ) :
if isinstance ( arg , Promise ) :
break
else :
return func ( * args , ** kwargs )
return lazy ( func , * resultclasses ) ( * args , ** kwargs )
return wrapper
empty = object ( )
def new_method_proxy ( func ) :
def inner ( self , * args ) :
if self . _wrapped is empty :
self . _setup ( )
return func ( self . _wrapped , * args )
return inner
class LazyObject ( object ) :
_wrapped = None
def __init__ ( self ) :
self . _wrapped = empty
__getattr__ = new_method_proxy ( getattr )
def __setattr__ ( self , name , value ) :
if name == "_wrapped" :
self . __dict__ [ "_wrapped" ] = value
else :
if self . _wrapped is empty :
self . _setup ( )
setattr ( self . _wrapped , name , value )
def __delattr__ ( self , name ) :
if name == "_wrapped" :
raise TypeError ( "can't delete _wrapped." )
if self . _wrapped is empty :
self . _setup ( )
delattr ( self . _wrapped , name )
def _setup ( self ) :
raise NotImplementedError ( 'subclasses of LazyObject must provide a _setup() method' )
def __getstate__ ( self ) :
if self . _wrapped is empty :
self . _setup ( )
return self . _wrapped . __dict__
@ classmethod
def __newobj__ ( cls , * args ) :
return cls . __new__ ( cls , * args )
def __reduce_ex__ ( self , proto ) :
if proto >= 2 :
return ( self . __newobj__ , ( self . __class__ , ) , self . __getstate__ ( ) )
else :
return ( copyreg . _reconstructor , ( self . __class__ , object , None ) , self . __getstate__ ( ) )
def __deepcopy__ ( self , memo ) :
if self . _wrapped is empty :
result = type ( self ) ( )
memo [ id ( self ) ] = result
return result
return copy . deepcopy ( self . _wrapped , memo )
if six . PY3 :
__bytes__ = new_method_proxy ( bytes )
__str__ = new_method_proxy ( str )
__bool__ = new_method_proxy ( bool )
else :
__str__ = new_method_proxy ( str )
__unicode__ = new_method_proxy ( unicode )
__nonzero__ = new_method_proxy ( bool )
__dir__ = new_method_proxy ( dir )
__class__ = property ( new_method_proxy ( operator . attrgetter ( "__class__" ) ) )
__eq__ = new_method_proxy ( operator . eq )
__ne__ = new_method_proxy ( operator . ne )
__hash__ = new_method_proxy ( hash )
__getitem__ = new_method_proxy ( operator . getitem )
__setitem__ = new_method_proxy ( operator . setitem )
__delitem__ = new_method_proxy ( operator . delitem )
__len__ = new_method_proxy ( len )
__contains__ = new_method_proxy ( operator . contains )
_super = super
class SimpleLazyObject ( LazyObject ) :
def __init__ ( self , func ) :
self . __dict__ [ '_setupfunc' ] = func
_super ( SimpleLazyObject , self ) . __init__ ( )
def _setup ( self ) :
self . _wrapped = self . _setupfunc ( )
def __repr__ ( self ) :
if self . _wrapped is empty :
repr_attr = self . _setupfunc
else :
repr_attr = self . _wrapped
return '<%s: %r>' % ( type ( self ) . __name__ , repr_attr )
def __deepcopy__ ( self , memo ) :
if self . _wrapped is empty :
result = SimpleLazyObject ( self . _setupfunc )
memo [ id ( self ) ] = result
return result
return copy . deepcopy ( self . _wrapped , memo )
class lazy_property ( property ) :
def __new__ ( cls , fget = None , fset = None , fdel = None , doc = None ) :
if fget is not None :
@ wraps ( fget )
def fget ( instance , instance_type = None , name = fget . __name__ ) :
return getattr ( instance , name ) ( )
if fset is not None :
@ wraps ( fset )
def fset ( instance , value , name = fset . __name__ ) :
return getattr ( instance , name ) ( value )
if fdel is not None :
@ wraps ( fdel )
def fdel ( instance , name = fdel . __name__ ) :
return getattr ( instance , name ) ( )
return property ( fget , fset , fdel , doc )
def partition ( predicate , values ) :
results = ( [ ] , [ ] )
for item in values :
results [ predicate ( item ) ] . append ( item )
return results
if sys . version_info >= ( 2 , 7 , 2 ) :
from functools import total_ordering
else :
def total_ordering ( cls ) :
convert = { '__lt__' : [ ( '__gt__' , lambda self , other : not ( self < other or self == other ) ) , ( '__le__' , lambda self , other : self < other or self == other ) , ( '__ge__' , lambda self , other : not self < other ) ] , '__le__' : [ ( '__ge__' , lambda self , other : not self <= other or self == other ) , ( '__lt__' , lambda self , other : self <= other and not self == other ) , ( '__gt__' , lambda self , other : not self <= other ) ] , '__gt__' : [ ( '__lt__' , lambda self , other : not ( self > other or self == other ) ) , ( '__ge__' , lambda self , other : self > other or self == other ) , ( '__le__' , lambda self , other : not self > other ) ] , '__ge__' : [ ( '__le__' , lambda self , other : ( not self >= other ) or self == other ) , ( '__gt__' , lambda self , other : self >= other and not self == other ) , ( '__lt__' , lambda self , other : not self >= other ) ] }
roots = set ( dir ( cls ) ) & set ( convert )
if not roots :
raise ValueError ( 'must define at least one ordering operation: < > <= >=' )
root = max ( roots )
for opname , opfunc in convert [ root ] :
if opname not in roots :
opfunc . __name__ = opname
opfunc . __doc__ = getattr ( int , opname ) . __doc__
setattr ( cls , opname , opfunc )
return cls
from __future__ import unicode_literals
import re
import sys
from django . utils . encoding import force_text , force_str
from django . utils . functional import allow_lazy
from django . utils . safestring import SafeData , mark_safe
from django . utils import six
from django . utils . six . moves . urllib . parse import quote , unquote , urlsplit , urlunsplit
from django . utils . text import normalize_newlines
from . html_parser import HTMLParser , HTMLParseError
TRAILING_PUNCTUATION = [ '.' , ',' , ':' , ';' , '.)' , '"' , '\'' ]
WRAPPING_PUNCTUATION = [ ( '(' , ')' ) , ( '<' , '>' ) , ( '[' , ']' ) , ( '<' , '>' ) , ( '"' , '"' ) , ( '\'' , '\'' ) ]
DOTS = [ '·' , '*' , '\u2022' , '' , '•' , '•' ]
unencoded_ampersands_re = re . compile ( r'&(?!(\w+|#\d+);)' )
word_split_re = re . compile ( r'(\s+)' )
simple_url_re = re . compile ( r'^https?://\[?\w' , re . IGNORECASE )
simple_url_2_re = re . compile ( r'^www\.|^(?!http)\w[^@]+\.(com|edu|gov|int|mil|net|org)($|/.*)$' , re . IGNORECASE )
simple_email_re = re . compile ( r'^\S+@\S+\.\S+$' )
link_target_attribute_re = re . compile ( r'(]*?)target=[^\s>]+' )
html_gunk_re = re . compile ( r'(?:
|<\/i>|<\/b>|<\/em>|<\/strong>|<\/?smallcaps>|<\/?uppercase>)' , re . IGNORECASE )
hard_coded_bullets_re = re . compile ( r'((?:(?:%s).*?[a-zA-Z].*?
\s*)+)' % '|' . join ( re . escape ( x ) for x in DOTS ) , re . DOTALL )
trailing_empty_content_re = re . compile ( r'(?:(?: |\s|
)*?
\s*)+\Z' )
def escape ( text ) :
return mark_safe ( force_text ( text ) . replace ( '&' , '&' ) . replace ( '<' , '<' ) . replace ( '>' , '>' ) . replace ( '"' , '"' ) . replace ( "'" , ''' ) )
escape = allow_lazy ( escape , six . text_type )
_js_escapes = { ord ( '\\' ) : '\\u005C' , ord ( '\'' ) : '\\u0027' , ord ( '"' ) : '\\u0022' , ord ( '>' ) : '\\u003E' , ord ( '<' ) : '\\u003C' , ord ( '&' ) : '\\u0026' , ord ( '=' ) : '\\u003D' , ord ( '-' ) : '\\u002D' , ord ( ';' ) : '\\u003B' , ord ( '\u2028' ) : '\\u2028' , ord ( '\u2029' ) : '\\u2029' }
_js_escapes . update ( ( ord ( '%c' % z ) , '\\u%04X' % z ) for z in range ( 32 ) )
def escapejs ( value ) :
return mark_safe ( force_text ( value ) . translate ( _js_escapes ) )
escapejs = allow_lazy ( escapejs , six . text_type )
def conditional_escape ( text ) :
if hasattr ( text , '__html__' ) :
return text . __html__ ( )
else :
return escape ( text )
def format_html ( format_string , * args , ** kwargs ) :
args_safe = map ( conditional_escape , args )
kwargs_safe = dict ( ( k , conditional_escape ( v ) ) for ( k , v ) in six . iteritems ( kwargs ) )
return mark_safe ( format_string . format ( * args_safe , ** kwargs_safe ) )
def format_html_join ( sep , format_string , args_generator ) :
return mark_safe ( conditional_escape ( sep ) . join ( format_html ( format_string , * tuple ( args ) ) for args in args_generator ) )
def linebreaks ( value , autoescape = False ) :
value = normalize_newlines ( value )
paras = re . split ( '\n{2,}' , value )
if autoescape :
paras = [ '%s
' % escape ( p ) . replace ( '\n' , '
' ) for p in paras ]
else :
paras = [ '%s
' % p . replace ( '\n' , '
' ) for p in paras ]
return '\n\n' . join ( paras )
linebreaks = allow_lazy ( linebreaks , six . text_type )
class MLStripper ( HTMLParser ) :
def __init__ ( self ) :
if sys . version_info [ : 2 ] == ( 3 , 2 ) :
HTMLParser . __init__ ( self , strict = False )
else :
HTMLParser . __init__ ( self )
self . reset ( )
self . fed = [ ]
def handle_data ( self , d ) :
self . fed . append ( d )
def handle_entityref ( self , name ) :
self . fed . append ( '&%s;' % name )
def handle_charref ( self , name ) :
self . fed . append ( '%s;' % name )
def get_data ( self ) :
return '' . join ( self . fed )
def _strip_once ( value ) :
s = MLStripper ( )
try :
s . feed ( value )
except HTMLParseError :
return value
try :
s . close ( )
except ( HTMLParseError , UnboundLocalError ) :
return s . get_data ( ) + s . rawdata
else :
return s . get_data ( )
def strip_tags ( value ) :
while '<' in value and '>' in value :
new_value = _strip_once ( value )
if new_value == value :
break
value = new_value
return value
strip_tags = allow_lazy ( strip_tags )
def remove_tags ( html , tags ) :
tags = [ re . escape ( tag ) for tag in tags . split ( ) ]
tags_re = '(%s)' % '|' . join ( tags )
starttag_re = re . compile ( r'<%s(/?>|(\s+[^>]*>))' % tags_re , re . U )
endtag_re = re . compile ( '%s>' % tags_re )
html = starttag_re . sub ( '' , html )
html = endtag_re . sub ( '' , html )
return html
remove_tags = allow_lazy ( remove_tags , six . text_type )
def strip_spaces_between_tags ( value ) :
return re . sub ( r'>\s+<' , '><' , force_text ( value ) )
strip_spaces_between_tags = allow_lazy ( strip_spaces_between_tags , six . text_type )
def strip_entities ( value ) :
return re . sub ( r'&(?:\w+|#\d+);' , '' , force_text ( value ) )
strip_entities = allow_lazy ( strip_entities , six . text_type )
def smart_urlquote ( url ) :
try :
scheme , netloc , path , query , fragment = urlsplit ( url )
try :
netloc = netloc . encode ( 'idna' ) . decode ( 'ascii' )
except UnicodeError :
pass
else :
url = urlunsplit ( ( scheme , netloc , path , query , fragment ) )
except ValueError :
pass
url = unquote ( force_str ( url ) )
url = quote ( url , safe = b'!*\'();:@&=+$,/?#[]~' )
return force_text ( url )
def urlize ( text , trim_url_limit = None , nofollow = False , autoescape = False ) :
def trim_url ( x , limit = trim_url_limit ) :
if limit is None or len ( x ) <= limit :
return x
return '%s...' % x [ : max ( 0 , limit - 3 ) ]
safe_input = isinstance ( text , SafeData )
words = word_split_re . split ( force_text ( text ) )
for i , word in enumerate ( words ) :
if '.' in word or '@' in word or ':' in word :
lead , middle , trail = '' , word , ''
for punctuation in TRAILING_PUNCTUATION :
if middle . endswith ( punctuation ) :
middle = middle [ : - len ( punctuation ) ]
trail = punctuation + trail
for opening , closing in WRAPPING_PUNCTUATION :
if middle . startswith ( opening ) :
middle = middle [ len ( opening ) : ]
lead = lead + opening
if ( middle . endswith ( closing ) and middle . count ( closing ) == middle . count ( opening ) + 1 ) :
middle = middle [ : - len ( closing ) ]
trail = closing + trail
url = None
nofollow_attr = ' rel="nofollow"' if nofollow else ''
if simple_url_re . match ( middle ) :
url = smart_urlquote ( middle )
elif simple_url_2_re . match ( middle ) :
url = smart_urlquote ( 'http://%s' % middle )
elif ':' not in middle and simple_email_re . match ( middle ) :
local , domain = middle . rsplit ( '@' , 1 )
try :
domain = domain . encode ( 'idna' ) . decode ( 'ascii' )
except UnicodeError :
continue
url = 'mailto:%s@%s' % ( local , domain )
nofollow_attr = ''
if url :
trimmed = trim_url ( middle )
if autoescape and not safe_input :
lead , trail = escape ( lead ) , escape ( trail )
url , trimmed = escape ( url ) , escape ( trimmed )
middle = '%s' % ( url , nofollow_attr , trimmed )
words [ i ] = mark_safe ( '%s%s%s' % ( lead , middle , trail ) )
else :
if safe_input :
words [ i ] = mark_safe ( word )
elif autoescape :
words [ i ] = escape ( word )
elif safe_input :
words [ i ] = mark_safe ( word )
elif autoescape :
words [ i ] = escape ( word )
return '' . join ( words )
urlize = allow_lazy ( urlize , six . text_type )
def avoid_wrapping ( value ) :
return value . replace ( " " , "\xa0" )
from django . utils . six . moves import html_parser as _html_parser
import re
import sys
current_version = sys . version_info
use_workaround = ( ( current_version < ( 2 , 7 , 3 ) ) or ( current_version >= ( 3 , 0 ) and current_version < ( 3 , 2 , 3 ) ) )
HTMLParseError = _html_parser . HTMLParseError
if not use_workaround :
if current_version >= ( 3 , 4 ) :
class HTMLParser ( _html_parser . HTMLParser ) :
def __init__ ( self , convert_charrefs = False , ** kwargs ) :
_html_parser . HTMLParser . __init__ ( self , convert_charrefs = convert_charrefs , ** kwargs )
else :
HTMLParser = _html_parser . HTMLParser
else :
tagfind = re . compile ( '([a-zA-Z][-.a-zA-Z0-9:_]*)(?:\s|/(?!>))*' )
class HTMLParser ( _html_parser . HTMLParser ) :
def __init__ ( self ) :
_html_parser . HTMLParser . __init__ ( self )
self . cdata_tag = None
def set_cdata_mode ( self , tag ) :
try :
self . interesting = _html_parser . interesting_cdata
except AttributeError :
self . interesting = re . compile ( r'\s*%s\s*>' % tag . lower ( ) , re . I )
self . cdata_tag = tag . lower ( )
def clear_cdata_mode ( self ) :
self . interesting = _html_parser . interesting_normal
self . cdata_tag = None
def parse_starttag ( self , i ) :
self . __starttag_text = None
endpos = self . check_for_whole_start_tag ( i )
if endpos < 0 :
return endpos
rawdata = self . rawdata
self . __starttag_text = rawdata [ i : endpos ]
attrs = [ ]
match = tagfind . match ( rawdata , i + 1 )
assert match , 'unexpected call to parse_starttag()'
k = match . end ( )
self . lasttag = tag = match . group ( 1 ) . lower ( )
while k < endpos :
m = _html_parser . attrfind . match ( rawdata , k )
if not m :
break
attrname , rest , attrvalue = m . group ( 1 , 2 , 3 )
if not rest :
attrvalue = None
elif ( attrvalue [ : 1 ] == '\'' == attrvalue [ - 1 : ] or attrvalue [ : 1 ] == '"' == attrvalue [ - 1 : ] ) :
attrvalue = attrvalue [ 1 : - 1 ]
if attrvalue :
attrvalue = self . unescape ( attrvalue )
attrs . append ( ( attrname . lower ( ) , attrvalue ) )
k = m . end ( )
end = rawdata [ k : endpos ] . strip ( )
if end not in ( ">" , "/>" ) :
lineno , offset = self . getpos ( )
if "\n" in self . __starttag_text :
lineno = lineno + self . __starttag_text . count ( "\n" )
offset = ( len ( self . __starttag_text ) - self . __starttag_text . rfind ( "\n" ) )
else :
offset = offset + len ( self . __starttag_text )
self . error ( "junk characters in start tag: %r" % ( rawdata [ k : endpos ] [ : 20 ] , ) )
if end . endswith ( '/>' ) :
self . handle_startendtag ( tag , attrs )
else :
self . handle_starttag ( tag , attrs )
if tag in self . CDATA_CONTENT_ELEMENTS :
self . set_cdata_mode ( tag )
return endpos
def parse_endtag ( self , i ) :
rawdata = self . rawdata
assert rawdata [ i : i + 2 ] == "" , "unexpected call to parse_endtag"
match = _html_parser . endendtag . search ( rawdata , i + 1 )
if not match :
return - 1
j = match . end ( )
match = _html_parser . endtagfind . match ( rawdata , i )
if not match :
if self . cdata_tag is not None :
self . handle_data ( rawdata [ i : j ] )
return j
self . error ( "bad end tag: %r" % ( rawdata [ i : j ] , ) )
tag = match . group ( 1 ) . strip ( )
if self . cdata_tag is not None :
if tag . lower ( ) != self . cdata_tag :
self . handle_data ( rawdata [ i : j ] )
return j
self . handle_endtag ( tag . lower ( ) )
self . clear_cdata_mode ( )
return j
from __future__ import unicode_literals
import base64
import calendar
import datetime
import re
import sys
from binascii import Error as BinasciiError
from email . utils import formatdate
from django . utils . datastructures import MultiValueDict
from django . utils . encoding import force_str , force_text
from django . utils . functional import allow_lazy
from django . utils import six
from django . utils . six . moves . urllib . parse import ( quote , quote_plus , unquote , unquote_plus , urlparse , urlencode as original_urlencode )
ETAG_MATCH = re . compile ( r'(?:W/)?"((?:\\.|[^"])*)"' )
MONTHS = 'jan feb mar apr may jun jul aug sep oct nov dec' . split ( )
__D = r'(?P\d{2})'
__D2 = r'(?P[ \d]\d)'
__M = r'(?P\w{3})'
__Y = r'(?P\d{4})'
__Y2 = r'(?P\d{2})'
__T = r'(?P\d{2}):(?P\d{2}):(?P\d{2})'
RFC1123_DATE = re . compile ( r'^\w{3}, %s %s %s %s GMT$' % ( __D , __M , __Y , __T ) )
RFC850_DATE = re . compile ( r'^\w{6,9}, %s-%s-%s %s GMT$' % ( __D , __M , __Y2 , __T ) )
ASCTIME_DATE = re . compile ( r'^\w{3} %s %s %s %s$' % ( __M , __D2 , __T , __Y ) )
def urlquote ( url , safe = '/' ) :
return force_text ( quote ( force_str ( url ) , force_str ( safe ) ) )
urlquote = allow_lazy ( urlquote , six . text_type )
def urlquote_plus ( url , safe = '' ) :
return force_text ( quote_plus ( force_str ( url ) , force_str ( safe ) ) )
urlquote_plus = allow_lazy ( urlquote_plus , six . text_type )
def urlunquote ( quoted_url ) :
return force_text ( unquote ( force_str ( quoted_url ) ) )
urlunquote = allow_lazy ( urlunquote , six . text_type )
def urlunquote_plus ( quoted_url ) :
return force_text ( unquote_plus ( force_str ( quoted_url ) ) )
urlunquote_plus = allow_lazy ( urlunquote_plus , six . text_type )
def urlencode ( query , doseq = 0 ) :
if isinstance ( query , MultiValueDict ) :
query = query . lists ( )
elif hasattr ( query , 'items' ) :
query = query . items ( )
return original_urlencode ( [ ( force_str ( k ) , [ force_str ( i ) for i in v ] if isinstance ( v , ( list , tuple ) ) else force_str ( v ) ) for k , v in query ] , doseq )
def cookie_date ( epoch_seconds = None ) :
rfcdate = formatdate ( epoch_seconds )
return '%s-%s-%s GMT' % ( rfcdate [ : 7 ] , rfcdate [ 8 : 11 ] , rfcdate [ 12 : 25 ] )
def http_date ( epoch_seconds = None ) :
return formatdate ( epoch_seconds , usegmt = True )
def parse_http_date ( date ) :
for regex in RFC1123_DATE , RFC850_DATE , ASCTIME_DATE :
m = regex . match ( date )
if m is not None :
break
else :
raise ValueError ( "%r is not in a valid HTTP date format" % date )
try :
year = int ( m . group ( 'year' ) )
if year < 100 :
if year < 70 :
year += 2000
else :
year += 1900
month = MONTHS . index ( m . group ( 'mon' ) . lower ( ) ) + 1
day = int ( m . group ( 'day' ) )
hour = int ( m . group ( 'hour' ) )
min = int ( m . group ( 'min' ) )
sec = int ( m . group ( 'sec' ) )
result = datetime . datetime ( year , month , day , hour , min , sec )
return calendar . timegm ( result . utctimetuple ( ) )
except Exception :
six . reraise ( ValueError , ValueError ( "%r is not a valid date" % date ) , sys . exc_info ( ) [ 2 ] )
def parse_http_date_safe ( date ) :
try :
return parse_http_date ( date )
except Exception :
pass
def base36_to_int ( s ) :
if len ( s ) > 13 :
raise ValueError ( "Base36 input too large" )
value = int ( s , 36 )
if six . PY2 and value > sys . maxint :
raise ValueError ( "Base36 input too large" )
return value
def int_to_base36 ( i ) :
digits = "0123456789abcdefghijklmnopqrstuvwxyz"
factor = 0
if i < 0 :
raise ValueError ( "Negative base36 conversion input." )
if six . PY2 :
if not isinstance ( i , six . integer_types ) :
raise TypeError ( "Non-integer base36 conversion input." )
if i > sys . maxint :
raise ValueError ( "Base36 conversion input too large." )
while True :
factor += 1
if i < 36 ** factor :
factor -= 1
break
base36 = [ ]
while factor >= 0 :
j = 36 ** factor
base36 . append ( digits [ i // j ] )
i = i % j
factor -= 1
return '' . join ( base36 )
def urlsafe_base64_encode ( s ) :
return base64 . urlsafe_b64encode ( s ) . rstrip ( b'\n=' )
def urlsafe_base64_decode ( s ) :
s = s . encode ( 'utf-8' )
try :
return base64 . urlsafe_b64decode ( s . ljust ( len ( s ) + len ( s ) % 4 , b'=' ) )
except ( LookupError , BinasciiError ) as e :
raise ValueError ( e )
def parse_etags ( etag_str ) :
etags = ETAG_MATCH . findall ( etag_str )
if not etags :
return [ etag_str ]
etags = [ e . encode ( 'ascii' ) . decode ( 'unicode_escape' ) for e in etags ]
return etags
def quote_etag ( etag ) :
return '"%s"' % etag . replace ( '\\' , '\\\\' ) . replace ( '"' , '\\"' )
def same_origin ( url1 , url2 ) :
p1 , p2 = urlparse ( url1 ) , urlparse ( url2 )
try :
return ( p1 . scheme , p1 . hostname , p1 . port ) == ( p2 . scheme , p2 . hostname , p2 . port )
except ValueError :
return False
def is_safe_url ( url , host = None ) :
if not url :
return False
url = url . replace ( '\\' , '/' )
if url . startswith ( '///' ) :
return False
url_info = urlparse ( url )
if not url_info . netloc and url_info . scheme :
return False
return ( ( not url_info . netloc or url_info . netloc == host ) and ( not url_info . scheme or url_info . scheme in [ 'http' , 'https' ] ) )
import warnings
import sys
from django . utils import six
from django . utils . deprecation import RemovedInDjango19Warning
warnings . warn ( "django.utils.importlib will be removed in Django 1.9." , RemovedInDjango19Warning , stacklevel = 2 )
def _resolve_name ( name , package , level ) :
if not hasattr ( package , 'rindex' ) :
raise ValueError ( "'package' not set to a string" )
dot = len ( package )
for x in range ( level , 1 , - 1 ) :
try :
dot = package . rindex ( '.' , 0 , dot )
except ValueError :
raise ValueError ( "attempted relative import beyond top-level package" )
return "%s.%s" % ( package [ : dot ] , name )
if six . PY3 :
from importlib import import_module
else :
def import_module ( name , package = None ) :
if name . startswith ( '.' ) :
if not package :
raise TypeError ( "relative imports require the 'package' argument" )
level = 0
for character in name :
if character != '.' :
break
level += 1
name = _resolve_name ( name [ level : ] , package , level )
__import__ ( name )
return sys . modules [ name ]
from django . core . exceptions import ValidationError
from django . utils . translation import ugettext_lazy as _
from django . utils . six . moves import xrange
def clean_ipv6_address ( ip_str , unpack_ipv4 = False , error_message = _ ( "This is not a valid IPv6 address." ) ) :
best_doublecolon_start = - 1
best_doublecolon_len = 0
doublecolon_start = - 1
doublecolon_len = 0
if not is_valid_ipv6_address ( ip_str ) :
raise ValidationError ( error_message , code = 'invalid' )
ip_str = _explode_shorthand_ip_string ( ip_str )
ip_str = _sanitize_ipv4_mapping ( ip_str )
if unpack_ipv4 :
ipv4_unpacked = _unpack_ipv4 ( ip_str )
if ipv4_unpacked :
return ipv4_unpacked
hextets = ip_str . split ( ":" )
for index in range ( len ( hextets ) ) :
hextets [ index ] = hextets [ index ] . lstrip ( '0' )
if not hextets [ index ] :
hextets [ index ] = '0'
if hextets [ index ] == '0' :
doublecolon_len += 1
if doublecolon_start == - 1 :
doublecolon_start = index
if doublecolon_len > best_doublecolon_len :
best_doublecolon_len = doublecolon_len
best_doublecolon_start = doublecolon_start
else :
doublecolon_len = 0
doublecolon_start = - 1
if best_doublecolon_len > 1 :
best_doublecolon_end = ( best_doublecolon_start + best_doublecolon_len )
if best_doublecolon_end == len ( hextets ) :
hextets += [ '' ]
hextets [ best_doublecolon_start : best_doublecolon_end ] = [ '' ]
if best_doublecolon_start == 0 :
hextets = [ '' ] + hextets
result = ":" . join ( hextets )
return result . lower ( )
def _sanitize_ipv4_mapping ( ip_str ) :
if not ip_str . lower ( ) . startswith ( '0000:0000:0000:0000:0000:ffff:' ) :
return ip_str
hextets = ip_str . split ( ':' )
if '.' in hextets [ - 1 ] :
return ip_str
ipv4_address = "%d.%d.%d.%d" % ( int ( hextets [ 6 ] [ 0 : 2 ] , 16 ) , int ( hextets [ 6 ] [ 2 : 4 ] , 16 ) , int ( hextets [ 7 ] [ 0 : 2 ] , 16 ) , int ( hextets [ 7 ] [ 2 : 4 ] , 16 ) , )
result = ':' . join ( hextets [ 0 : 6 ] )
result += ':' + ipv4_address
return result
def _unpack_ipv4 ( ip_str ) :
if not ip_str . lower ( ) . startswith ( '0000:0000:0000:0000:0000:ffff:' ) :
return None
return ip_str . rsplit ( ':' , 1 ) [ 1 ]
def is_valid_ipv6_address ( ip_str ) :
from django . core . validators import validate_ipv4_address
if ':' not in ip_str :
return False
if ip_str . count ( '::' ) > 1 :
return False
if ':::' in ip_str :
return False
if ( ( ip_str . startswith ( ':' ) and not ip_str . startswith ( '::' ) ) or ( ip_str . endswith ( ':' ) and not ip_str . endswith ( '::' ) ) ) :
return False
if ip_str . count ( ':' ) > 7 :
return False
if '::' not in ip_str and ip_str . count ( ':' ) != 7 :
if ip_str . count ( '.' ) != 3 :
return False
ip_str = _explode_shorthand_ip_string ( ip_str )
for hextet in ip_str . split ( ':' ) :
if hextet . count ( '.' ) == 3 :
if not ip_str . split ( ':' ) [ - 1 ] == hextet :
return False
try :
validate_ipv4_address ( hextet )
except ValidationError :
return False
else :
try :
if int ( hextet , 16 ) < 0x0 or int ( hextet , 16 ) > 0xFFFF :
return False
except ValueError :
return False
return True
def _explode_shorthand_ip_string ( ip_str ) :
if not _is_shorthand_ip ( ip_str ) :
return ip_str
new_ip = [ ]
hextet = ip_str . split ( '::' )
if '.' in ip_str . split ( ':' ) [ - 1 ] :
fill_to = 7
else :
fill_to = 8
if len ( hextet ) > 1 :
sep = len ( hextet [ 0 ] . split ( ':' ) ) + len ( hextet [ 1 ] . split ( ':' ) )
new_ip = hextet [ 0 ] . split ( ':' )
for __ in xrange ( fill_to - sep ) :
new_ip . append ( '0000' )
new_ip += hextet [ 1 ] . split ( ':' )
else :
new_ip = ip_str . split ( ':' )
ret_ip = [ ]
for hextet in new_ip :
ret_ip . append ( ( '0' * ( 4 - len ( hextet ) ) + hextet ) . lower ( ) )
return ':' . join ( ret_ip )
def _is_shorthand_ip ( ip_str ) :
if ip_str . count ( '::' ) == 1 :
return True
if any ( len ( x ) < 4 for x in ip_str . split ( ':' ) ) :
return True
return False
def is_iterable ( x ) :
try :
iter ( x )
except TypeError :
return False
else :
return True
import re
class Tok ( object ) :
num = 0
def __init__ ( self , name , regex , next = None ) :
self . id = Tok . num
Tok . num += 1
self . name = name
self . regex = regex
self . next = next
def literals ( choices , prefix = "" , suffix = "" ) :
return "|" . join ( prefix + re . escape ( c ) + suffix for c in choices . split ( ) )
class Lexer ( object ) :
def __init__ ( self , states , first ) :
self . regexes = { }
self . toks = { }
for state , rules in states . items ( ) :
parts = [ ]
for tok in rules :
groupid = "t%d" % tok . id
self . toks [ groupid ] = tok
parts . append ( "(?P<%s>%s)" % ( groupid , tok . regex ) )
self . regexes [ state ] = re . compile ( "|" . join ( parts ) , re . MULTILINE | re . VERBOSE )
self . state = first
def lex ( self , text ) :
end = len ( text )
state = self . state
regexes = self . regexes
toks = self . toks
start = 0
while start < end :
for match in regexes [ state ] . finditer ( text , start ) :
name = match . lastgroup
tok = toks [ name ]
toktext = match . group ( name )
start += len ( toktext )
yield ( tok . name , toktext )
if tok . next :
state = tok . next
break
self . state = state
class JsLexer ( Lexer ) :
both_before = [ Tok ( "comment" , r"/\*(.|\n)*?\*/" ) , Tok ( "linecomment" , r"//.*?$" ) , Tok ( "ws" , r"\s+" ) , Tok ( "keyword" , literals ( """ break case catch class const continue debugger default delete do else enum export extends finally for function if import in instanceof new return super switch this throw try typeof var void while with """ , suffix = r"\b" ) , next = 'reg' ) , Tok ( "reserved" , literals ( "null true false" , suffix = r"\b" ) , next = 'div' ) , Tok ( "id" , r""" ([a-zA-Z_$ ]|\\u[0-9a-fA-Z]{4}) # first char ([a-zA-Z_$0-9]|\\u[0-9a-fA-F]{4})* # rest chars """ , next = 'div' ) , Tok ( "hnum" , r"0[xX][0-9a-fA-F]+" , next = 'div' ) , Tok ( "onum" , r"0[0-7]+" ) , Tok ( "dnum" , r""" ( (0|[1-9][0-9]*) # DecimalIntegerLiteral \. # dot [0-9]* # DecimalDigits-opt ([eE][-+]?[0-9]+)? # ExponentPart-opt | \. # dot [0-9]+ # DecimalDigits ([eE][-+]?[0-9]+)? # ExponentPart-opt | (0|[1-9][0-9]*) # DecimalIntegerLiteral ([eE][-+]?[0-9]+)? # ExponentPart-opt ) """ , next = 'div' ) , Tok ( "punct" , literals ( """ >>>= === !== >>> <<= >>= <= >= == != << >> && || += -= *= %= &= |= ^= """ ) , next = "reg" ) , Tok ( "punct" , literals ( "++ -- ) ]" ) , next = 'div' ) , Tok ( "punct" , literals ( "{ } ( [ . ; , < > + - * % & | ^ ! ~ ? : =" ) , next = 'reg' ) , Tok ( "string" , r'"([^"\\]|(\\(.|\n)))*?"' , next = 'div' ) , Tok ( "string" , r"'([^'\\]|(\\(.|\n)))*?'" , next = 'div' ) , ]
both_after = [ Tok ( "other" , r"." ) , ]
states = { 'div' : both_before + [ Tok ( "punct" , literals ( "/= /" ) , next = 'reg' ) , ] + both_after , 'reg' : both_before + [ Tok ( "regex" , r""" / # opening slash # First character is.. ( [^*\\/[] # anything but * \ / or [ | \\. # or an escape sequence | \[ # or a class, which has ( [^\]\\] # anything but \ or ] | \\. # or an escape sequence )* # many times \] ) # Following characters are same, except for excluding a star ( [^\\/[] # anything but \ / or [ | \\. # or an escape sequence | \[ # or a class, which has ( [^\]\\] # anything but \ or ] | \\. # or an escape sequence )* # many times \] )* # many times / # closing slash [a-zA-Z0-9]* # trailing flags """ , next = 'div' ) , ] + both_after , }
def __init__ ( self ) :
super ( JsLexer , self ) . __init__ ( self . states , 'reg' )
def prepare_js_for_gettext ( js ) :
def escape_quotes ( m ) :
s = m . group ( 0 )
if s == '"' :
return r'\"'
else :
return s
lexer = JsLexer ( )
c = [ ]
for name , tok in lexer . lex ( js ) :
if name == 'regex' :
tok = '"REGEX"'
elif name == 'string' :
if tok . startswith ( "'" ) :
guts = re . sub ( r"\\.|." , escape_quotes , tok [ 1 : - 1 ] )
tok = '"' + guts + '"'
elif name == 'id' :
tok = tok . replace ( "\\" , "U" )
c . append ( tok )
return '' . join ( c )
import logging
import sys
import warnings
from django . conf import settings
from django . core import mail
from django . core . mail import get_connection
from django . utils . deprecation import RemovedInNextVersionWarning
from django . utils . module_loading import import_string
from django . views . debug import ExceptionReporter , get_exception_reporter_filter
from logging import NullHandler
from logging . config import dictConfig
getLogger = logging . getLogger
DEFAULT_LOGGING = { 'version' : 1 , 'disable_existing_loggers' : False , 'filters' : { 'require_debug_false' : { '()' : 'django.utils.log.RequireDebugFalse' , } , 'require_debug_true' : { '()' : 'django.utils.log.RequireDebugTrue' , } , } , 'handlers' : { 'console' : { 'level' : 'INFO' , 'filters' : [ 'require_debug_true' ] , 'class' : 'logging.StreamHandler' , } , 'null' : { 'class' : 'logging.NullHandler' , } , 'mail_admins' : { 'level' : 'ERROR' , 'filters' : [ 'require_debug_false' ] , 'class' : 'django.utils.log.AdminEmailHandler' } } , 'loggers' : { 'django' : { 'handlers' : [ 'console' ] , } , 'django.request' : { 'handlers' : [ 'mail_admins' ] , 'level' : 'ERROR' , 'propagate' : False , } , 'django.security' : { 'handlers' : [ 'mail_admins' ] , 'level' : 'ERROR' , 'propagate' : False , } , 'py.warnings' : { 'handlers' : [ 'console' ] , } , } }
def configure_logging ( logging_config , logging_settings ) :
if not sys . warnoptions :
logging . captureWarnings ( True )
warnings . simplefilter ( "default" , RemovedInNextVersionWarning )
if logging_config :
logging_config_func = import_string ( logging_config )
logging_config_func ( DEFAULT_LOGGING )
if logging_settings :
logging_config_func ( logging_settings )
class AdminEmailHandler ( logging . Handler ) :
def __init__ ( self , include_html = False , email_backend = None ) :
logging . Handler . __init__ ( self )
self . include_html = include_html
self . email_backend = email_backend
def emit ( self , record ) :
try :
request = record . request
subject = '%s (%s IP): %s' % ( record . levelname , ( 'internal' if request . META . get ( 'REMOTE_ADDR' ) in settings . INTERNAL_IPS else 'EXTERNAL' ) , record . getMessage ( ) )
filter = get_exception_reporter_filter ( request )
request_repr = '\n{0}' . format ( filter . get_request_repr ( request ) )
except Exception :
subject = '%s: %s' % ( record . levelname , record . getMessage ( ) )
request = None
request_repr = "unavailable"
subject = self . format_subject ( subject )
if record . exc_info :
exc_info = record . exc_info
else :
exc_info = ( None , record . getMessage ( ) , None )
message = "%s\n\nRequest repr(): %s" % ( self . format ( record ) , request_repr )
reporter = ExceptionReporter ( request , is_email = True , * exc_info )
html_message = reporter . get_traceback_html ( ) if self . include_html else None
mail . mail_admins ( subject , message , fail_silently = True , html_message = html_message , connection = self . connection ( ) )
def connection ( self ) :
return get_connection ( backend = self . email_backend , fail_silently = True )
def format_subject ( self , subject ) :
formatted_subject = subject . replace ( '\n' , '\\n' ) . replace ( '\r' , '\\r' )
return formatted_subject [ : 989 ]
class CallbackFilter ( logging . Filter ) :
def __init__ ( self , callback ) :
self . callback = callback
def filter ( self , record ) :
if self . callback ( record ) :
return 1
return 0
class RequireDebugFalse ( logging . Filter ) :
def filter ( self , record ) :
return not settings . DEBUG
class RequireDebugTrue ( logging . Filter ) :
def filter ( self , record ) :
return settings . DEBUG
try :
from functools import lru_cache
except ImportError :
from collections import namedtuple
from functools import update_wrapper
from threading import RLock
_CacheInfo = namedtuple ( "CacheInfo" , [ "hits" , "misses" , "maxsize" , "currsize" ] )
class _HashedSeq ( list ) :
__slots__ = 'hashvalue'
def __init__ ( self , tup , hash = hash ) :
self [ : ] = tup
self . hashvalue = hash ( tup )
def __hash__ ( self ) :
return self . hashvalue
def _make_key ( args , kwds , typed , kwd_mark = ( object ( ) , ) , fasttypes = { int , str , frozenset , type ( None ) } , sorted = sorted , tuple = tuple , type = type , len = len ) :
key = args
if kwds :
sorted_items = sorted ( kwds . items ( ) )
key += kwd_mark
for item in sorted_items :
key += item
if typed :
key += tuple ( type ( v ) for v in args )
if kwds :
key += tuple ( type ( v ) for k , v in sorted_items )
elif len ( key ) == 1 and type ( key [ 0 ] ) in fasttypes :
return key [ 0 ]
return _HashedSeq ( key )
def lru_cache ( maxsize = 100 , typed = False ) :
def decorating_function ( user_function ) :
cache = dict ( )
stats = [ 0 , 0 ]
HITS , MISSES = 0 , 1
make_key = _make_key
cache_get = cache . get
_len = len
lock = RLock ( )
root = [ ]
root [ : ] = [ root , root , None , None ]
nonlocal_root = [ root ]
PREV , NEXT , KEY , RESULT = 0 , 1 , 2 , 3
if maxsize == 0 :
def wrapper ( * args , ** kwds ) :
result = user_function ( * args , ** kwds )
stats [ MISSES ] += 1
return result
elif maxsize is None :
def wrapper ( * args , ** kwds ) :
key = make_key ( args , kwds , typed )
result = cache_get ( key , root )
if result is not root :
stats [ HITS ] += 1
return result
result = user_function ( * args , ** kwds )
cache [ key ] = result
stats [ MISSES ] += 1
return result
else :
def wrapper ( * args , ** kwds ) :
key = make_key ( args , kwds , typed ) if kwds or typed else args
with lock :
link = cache_get ( key )
if link is not None :
root , = nonlocal_root
link_prev , link_next , key , result = link
link_prev [ NEXT ] = link_next
link_next [ PREV ] = link_prev
last = root [ PREV ]
last [ NEXT ] = root [ PREV ] = link
link [ PREV ] = last
link [ NEXT ] = root
stats [ HITS ] += 1
return result
result = user_function ( * args , ** kwds )
with lock :
root , = nonlocal_root
if key in cache :
pass
elif _len ( cache ) >= maxsize :
oldroot = root
oldroot [ KEY ] = key
oldroot [ RESULT ] = result
root = nonlocal_root [ 0 ] = oldroot [ NEXT ]
oldkey = root [ KEY ]
oldvalue = root [ RESULT ]
root [ KEY ] = root [ RESULT ] = None
del cache [ oldkey ]
cache [ key ] = oldroot
else :
last = root [ PREV ]
link = [ last , root , key , result ]
last [ NEXT ] = root [ PREV ] = cache [ key ] = link
stats [ MISSES ] += 1
return result
def cache_info ( ) :
with lock :
return _CacheInfo ( stats [ HITS ] , stats [ MISSES ] , maxsize , len ( cache ) )
def cache_clear ( ) :
with lock :
cache . clear ( )
root = nonlocal_root [ 0 ]
root [ : ] = [ root , root , None , None ]
stats [ : ] = [ 0 , 0 ]
wrapper . __wrapped__ = user_function
wrapper . cache_info = cache_info
wrapper . cache_clear = cache_clear
return update_wrapper ( wrapper , user_function )
return decorating_function
from __future__ import absolute_import
import copy
from importlib import import_module
import os
import sys
import warnings
from django . core . exceptions import ImproperlyConfigured
from django . utils import six
from django . utils . deprecation import RemovedInDjango19Warning
def import_string ( dotted_path ) :
try :
module_path , class_name = dotted_path . rsplit ( '.' , 1 )
except ValueError :
msg = "%s doesn't look like a module path" % dotted_path
six . reraise ( ImportError , ImportError ( msg ) , sys . exc_info ( ) [ 2 ] )
module = import_module ( module_path )
try :
return getattr ( module , class_name )
except AttributeError :
msg = 'Module "%s" does not define a "%s" attribute/class' % ( dotted_path , class_name )
six . reraise ( ImportError , ImportError ( msg ) , sys . exc_info ( ) [ 2 ] )
def import_by_path ( dotted_path , error_prefix = '' ) :
warnings . warn ( 'import_by_path() has been deprecated. Use import_string() instead.' , RemovedInDjango19Warning , stacklevel = 2 )
try :
attr = import_string ( dotted_path )
except ImportError as e :
msg = '%sError importing module %s: "%s"' % ( error_prefix , dotted_path , e )
six . reraise ( ImproperlyConfigured , ImproperlyConfigured ( msg ) , sys . exc_info ( ) [ 2 ] )
return attr
def autodiscover_modules ( * args , ** kwargs ) :
from django . apps import apps
register_to = kwargs . get ( 'register_to' )
for app_config in apps . get_app_configs ( ) :
try :
if register_to :
before_import_registry = copy . copy ( register_to . _registry )
for module_to_search in args :
import_module ( '%s.%s' % ( app_config . name , module_to_search ) )
except :
if register_to :
register_to . _registry = before_import_registry
if module_has_submodule ( app_config . module , module_to_search ) :
raise
if sys . version_info [ : 2 ] >= ( 3 , 3 ) :
if sys . version_info [ : 2 ] >= ( 3 , 4 ) :
from importlib . util import find_spec as importlib_find
else :
from importlib import find_loader as importlib_find
def module_has_submodule ( package , module_name ) :
try :
package_name = package . __name__
package_path = package . __path__
except AttributeError :
return False
full_module_name = package_name + '.' + module_name
return importlib_find ( full_module_name , package_path ) is not None
else :
import imp
def module_has_submodule ( package , module_name ) :
name = "." . join ( [ package . __name__ , module_name ] )
try :
return sys . modules [ name ] is not None
except KeyError :
pass
try :
package_path = package . __path__
except AttributeError :
return False
for finder in sys . meta_path :
if finder . find_module ( name , package_path ) :
return True
for entry in package_path :
try :
finder = sys . path_importer_cache [ entry ]
if finder is None :
try :
file_ , _ , _ = imp . find_module ( module_name , [ entry ] )
if file_ :
file_ . close ( )
return True
except ImportError :
continue
elif finder . find_module ( name ) :
return True
else :
continue
except KeyError :
for hook in sys . path_hooks :
try :
finder = hook ( entry )
if finder . find_module ( name ) :
return True
else :
break
except ImportError :
continue
else :
if os . path . isdir ( entry ) :
try :
file_ , _ , _ = imp . find_module ( module_name , [ entry ] )
if file_ :
file_ . close ( )
return True
except ImportError :
pass
else :
return False
from django . conf import settings
from django . utils . safestring import mark_safe
from django . utils import six
def format ( number , decimal_sep , decimal_pos = None , grouping = 0 , thousand_sep = '' , force_grouping = False ) :
use_grouping = settings . USE_L10N and settings . USE_THOUSAND_SEPARATOR
use_grouping = use_grouping or force_grouping
use_grouping = use_grouping and grouping > 0
if isinstance ( number , int ) and not use_grouping and not decimal_pos :
return mark_safe ( six . text_type ( number ) )
sign = ''
str_number = six . text_type ( number )
if str_number [ 0 ] == '-' :
sign = '-'
str_number = str_number [ 1 : ]
if '.' in str_number :
int_part , dec_part = str_number . split ( '.' )
if decimal_pos is not None :
dec_part = dec_part [ : decimal_pos ]
else :
int_part , dec_part = str_number , ''
if decimal_pos is not None :
dec_part = dec_part + ( '0' * ( decimal_pos - len ( dec_part ) ) )
if dec_part :
dec_part = decimal_sep + dec_part
if use_grouping :
int_part_gd = ''
for cnt , digit in enumerate ( int_part [ : : - 1 ] ) :
if cnt and not cnt % grouping :
int_part_gd += thousand_sep
int_part_gd += digit
int_part = int_part_gd [ : : - 1 ]
return sign + int_part + dec_part
from __future__ import unicode_literals
from django . utils import six
from django . utils . six . moves import zip
ESCAPE_MAPPINGS = { "A" : None , "b" : None , "B" : None , "d" : "0" , "D" : "x" , "s" : " " , "S" : "x" , "w" : "x" , "W" : "!" , "Z" : None , }
class Choice ( list ) :
class Group ( list ) :
class NonCapture ( list ) :
def normalize ( pattern ) :
result = [ ]
non_capturing_groups = [ ]
consume_next = True
pattern_iter = next_char ( iter ( pattern ) )
num_args = 0
try :
ch , escaped = next ( pattern_iter )
except StopIteration :
return [ ( '' , [ ] ) ]
try :
while True :
if escaped :
result . append ( ch )
elif ch == '.' :
result . append ( "." )
elif ch == '|' :
raise NotImplementedError ( 'Awaiting Implementation' )
elif ch == "^" :
pass
elif ch == '$' :
break
elif ch == ')' :
start = non_capturing_groups . pop ( )
inner = NonCapture ( result [ start : ] )
result = result [ : start ] + [ inner ]
elif ch == '[' :
ch , escaped = next ( pattern_iter )
result . append ( ch )
ch , escaped = next ( pattern_iter )
while escaped or ch != ']' :
ch , escaped = next ( pattern_iter )
elif ch == '(' :
ch , escaped = next ( pattern_iter )
if ch != '?' or escaped :
name = "_%d" % num_args
num_args += 1
result . append ( Group ( ( ( "%%(%s)s" % name ) , name ) ) )
walk_to_end ( ch , pattern_iter )
else :
ch , escaped = next ( pattern_iter )
if ch in "iLmsu#" :
walk_to_end ( ch , pattern_iter )
elif ch == ':' :
non_capturing_groups . append ( len ( result ) )
elif ch != 'P' :
raise ValueError ( "Non-reversible reg-exp portion: '(?%s'" % ch )
else :
ch , escaped = next ( pattern_iter )
if ch not in ( '<' , '=' ) :
raise ValueError ( "Non-reversible reg-exp portion: '(?P%s'" % ch )
if ch == '<' :
terminal_char = '>'
else :
terminal_char = ')'
name = [ ]
ch , escaped = next ( pattern_iter )
while ch != terminal_char :
name . append ( ch )
ch , escaped = next ( pattern_iter )
param = '' . join ( name )
if terminal_char != ')' :
result . append ( Group ( ( ( "%%(%s)s" % param ) , param ) ) )
walk_to_end ( ch , pattern_iter )
else :
result . append ( Group ( ( ( "%%(%s)s" % param ) , None ) ) )
elif ch in "*?+{" :
count , ch = get_quantifier ( ch , pattern_iter )
if ch :
consume_next = False
if count == 0 :
if contains ( result [ - 1 ] , Group ) :
result [ - 1 ] = Choice ( [ None , result [ - 1 ] ] )
else :
result . pop ( )
elif count > 1 :
result . extend ( [ result [ - 1 ] ] * ( count - 1 ) )
else :
result . append ( ch )
if consume_next :
ch , escaped = next ( pattern_iter )
else :
consume_next = True
except StopIteration :
pass
except NotImplementedError :
return [ ( '' , [ ] ) ]
return list ( zip ( * flatten_result ( result ) ) )
def next_char ( input_iter ) :
for ch in input_iter :
if ch != '\\' :
yield ch , False
continue
ch = next ( input_iter )
representative = ESCAPE_MAPPINGS . get ( ch , ch )
if representative is None :
continue
yield representative , True
def walk_to_end ( ch , input_iter ) :
if ch == '(' :
nesting = 1
else :
nesting = 0
for ch , escaped in input_iter :
if escaped :
continue
elif ch == '(' :
nesting += 1
elif ch == ')' :
if not nesting :
return
nesting -= 1
def get_quantifier ( ch , input_iter ) :
if ch in '*?+' :
try :
ch2 , escaped = next ( input_iter )
except StopIteration :
ch2 = None
if ch2 == '?' :
ch2 = None
if ch == '+' :
return 1 , ch2
return 0 , ch2
quant = [ ]
while ch != '}' :
ch , escaped = next ( input_iter )
quant . append ( ch )
quant = quant [ : - 1 ]
values = '' . join ( quant ) . split ( ',' )
try :
ch , escaped = next ( input_iter )
except StopIteration :
ch = None
if ch == '?' :
ch = None
return int ( values [ 0 ] ) , ch
def contains ( source , inst ) :
if isinstance ( source , inst ) :
return True
if isinstance ( source , NonCapture ) :
for elt in source :
if contains ( elt , inst ) :
return True
return False
def flatten_result ( source ) :
if source is None :
return [ '' ] , [ [ ] ]
if isinstance ( source , Group ) :
if source [ 1 ] is None :
params = [ ]
else :
params = [ source [ 1 ] ]
return [ source [ 0 ] ] , [ params ]
result = [ '' ]
result_args = [ [ ] ]
pos = last = 0
for pos , elt in enumerate ( source ) :
if isinstance ( elt , six . string_types ) :
continue
piece = '' . join ( source [ last : pos ] )
if isinstance ( elt , Group ) :
piece += elt [ 0 ]
param = elt [ 1 ]
else :
param = None
last = pos + 1
for i in range ( len ( result ) ) :
result [ i ] += piece
if param :
result_args [ i ] . append ( param )
if isinstance ( elt , ( Choice , NonCapture ) ) :
if isinstance ( elt , NonCapture ) :
elt = [ elt ]
inner_result , inner_args = [ ] , [ ]
for item in elt :
res , args = flatten_result ( item )
inner_result . extend ( res )
inner_args . extend ( args )
new_result = [ ]
new_args = [ ]
for item , args in zip ( result , result_args ) :
for i_item , i_args in zip ( inner_result , inner_args ) :
new_result . append ( item + i_item )
new_args . append ( args [ : ] + i_args )
result = new_result
result_args = new_args
if pos >= last :
piece = '' . join ( source [ last : ] )
for i in range ( len ( result ) ) :
result [ i ] += piece
return result , result_args
from django . utils . functional import curry , Promise
from django . utils import six
class EscapeData ( object ) :
pass
class EscapeBytes ( bytes , EscapeData ) :
pass
class EscapeText ( six . text_type , EscapeData ) :
pass
if six . PY3 :
EscapeString = EscapeText
else :
EscapeString = EscapeBytes
EscapeUnicode = EscapeText
class SafeData ( object ) :
def __html__ ( self ) :
return self
class SafeBytes ( bytes , SafeData ) :
def __add__ ( self , rhs ) :
t = super ( SafeBytes , self ) . __add__ ( rhs )
if isinstance ( rhs , SafeText ) :
return SafeText ( t )
elif isinstance ( rhs , SafeBytes ) :
return SafeBytes ( t )
return t
def _proxy_method ( self , * args , ** kwargs ) :
method = kwargs . pop ( 'method' )
data = method ( self , * args , ** kwargs )
if isinstance ( data , bytes ) :
return SafeBytes ( data )
else :
return SafeText ( data )
decode = curry ( _proxy_method , method = bytes . decode )
class SafeText ( six . text_type , SafeData ) :
def __add__ ( self , rhs ) :
t = super ( SafeText , self ) . __add__ ( rhs )
if isinstance ( rhs , SafeData ) :
return SafeText ( t )
return t
def _proxy_method ( self , * args , ** kwargs ) :
method = kwargs . pop ( 'method' )
data = method ( self , * args , ** kwargs )
if isinstance ( data , bytes ) :
return SafeBytes ( data )
else :
return SafeText ( data )
encode = curry ( _proxy_method , method = six . text_type . encode )
if six . PY3 :
SafeString = SafeText
else :
SafeString = SafeBytes
SafeUnicode = SafeText
def mark_safe ( s ) :
if isinstance ( s , SafeData ) :
return s
if isinstance ( s , bytes ) or ( isinstance ( s , Promise ) and s . _delegate_bytes ) :
return SafeBytes ( s )
if isinstance ( s , ( six . text_type , Promise ) ) :
return SafeText ( s )
return SafeString ( str ( s ) )
def mark_for_escaping ( s ) :
if isinstance ( s , ( SafeData , EscapeData ) ) :
return s
if isinstance ( s , bytes ) or ( isinstance ( s , Promise ) and s . _delegate_bytes ) :
return EscapeBytes ( s )
if isinstance ( s , ( six . text_type , Promise ) ) :
return EscapeText ( s )
return EscapeBytes ( bytes ( s ) )
import operator
import sys
import types
__author__ = "Benjamin Peterson "
__version__ = "1.6.1"
PY2 = sys . version_info [ 0 ] == 2
PY3 = sys . version_info [ 0 ] == 3
if PY3 :
string_types = str ,
integer_types = int ,
class_types = type ,
text_type = str
binary_type = bytes
MAXSIZE = sys . maxsize
else :
string_types = basestring ,
integer_types = ( int , long )
class_types = ( type , types . ClassType )
text_type = unicode
binary_type = str
if sys . platform . startswith ( "java" ) :
MAXSIZE = int ( ( 1 << 31 ) - 1 )
else :
class X ( object ) :
def __len__ ( self ) :
return 1 << 31
try :
len ( X ( ) )
except OverflowError :
MAXSIZE = int ( ( 1 << 31 ) - 1 )
else :
MAXSIZE = int ( ( 1 << 63 ) - 1 )
del X
def _add_doc ( func , doc ) :
func . __doc__ = doc
def _import_module ( name ) :
__import__ ( name )
return sys . modules [ name ]
class _LazyDescr ( object ) :
def __init__ ( self , name ) :
self . name = name
def __get__ ( self , obj , tp ) :
try :
result = self . _resolve ( )
except ImportError :
raise AttributeError ( "%s could not be imported " % self . name )
setattr ( obj , self . name , result )
delattr ( obj . __class__ , self . name )
return result
class MovedModule ( _LazyDescr ) :
def __init__ ( self , name , old , new = None ) :
super ( MovedModule , self ) . __init__ ( name )
if PY3 :
if new is None :
new = name
self . mod = new
else :
self . mod = old
def _resolve ( self ) :
return _import_module ( self . mod )
def __getattr__ ( self , attr ) :
if ( attr in ( "__file__" , "__name__" , "__path__" ) and self . mod not in sys . modules ) :
raise AttributeError ( attr )
try :
_module = self . _resolve ( )
except ImportError :
raise AttributeError ( attr )
value = getattr ( _module , attr )
setattr ( self , attr , value )
return value
class _LazyModule ( types . ModuleType ) :
def __init__ ( self , name ) :
super ( _LazyModule , self ) . __init__ ( name )
self . __doc__ = self . __class__ . __doc__
def __dir__ ( self ) :
attrs = [ "__doc__" , "__name__" ]
attrs += [ attr . name for attr in self . _moved_attributes ]
return attrs
_moved_attributes = [ ]
class MovedAttribute ( _LazyDescr ) :
def __init__ ( self , name , old_mod , new_mod , old_attr = None , new_attr = None ) :
super ( MovedAttribute , self ) . __init__ ( name )
if PY3 :
if new_mod is None :
new_mod = name
self . mod = new_mod
if new_attr is None :
if old_attr is None :
new_attr = name
else :
new_attr = old_attr
self . attr = new_attr
else :
self . mod = old_mod
if old_attr is None :
old_attr = name
self . attr = old_attr
def _resolve ( self ) :
module = _import_module ( self . mod )
return getattr ( module , self . attr )
class _MovedItems ( _LazyModule ) :
_moved_attributes = [ MovedAttribute ( "cStringIO" , "cStringIO" , "io" , "StringIO" ) , MovedAttribute ( "filter" , "itertools" , "builtins" , "ifilter" , "filter" ) , MovedAttribute ( "filterfalse" , "itertools" , "itertools" , "ifilterfalse" , "filterfalse" ) , MovedAttribute ( "input" , "__builtin__" , "builtins" , "raw_input" , "input" ) , MovedAttribute ( "map" , "itertools" , "builtins" , "imap" , "map" ) , MovedAttribute ( "range" , "__builtin__" , "builtins" , "xrange" , "range" ) , MovedAttribute ( "reload_module" , "__builtin__" , "imp" , "reload" ) , MovedAttribute ( "reduce" , "__builtin__" , "functools" ) , MovedAttribute ( "StringIO" , "StringIO" , "io" ) , MovedAttribute ( "UserString" , "UserString" , "collections" ) , MovedAttribute ( "xrange" , "__builtin__" , "builtins" , "xrange" , "range" ) , MovedAttribute ( "zip" , "itertools" , "builtins" , "izip" , "zip" ) , MovedAttribute ( "zip_longest" , "itertools" , "itertools" , "izip_longest" , "zip_longest" ) , MovedModule ( "builtins" , "__builtin__" ) , MovedModule ( "configparser" , "ConfigParser" ) , MovedModule ( "copyreg" , "copy_reg" ) , MovedModule ( "dbm_gnu" , "gdbm" , "dbm.gnu" ) , MovedModule ( "http_cookiejar" , "cookielib" , "http.cookiejar" ) , MovedModule ( "http_cookies" , "Cookie" , "http.cookies" ) , MovedModule ( "html_entities" , "htmlentitydefs" , "html.entities" ) , MovedModule ( "html_parser" , "HTMLParser" , "html.parser" ) , MovedModule ( "http_client" , "httplib" , "http.client" ) , MovedModule ( "email_mime_multipart" , "email.MIMEMultipart" , "email.mime.multipart" ) , MovedModule ( "email_mime_text" , "email.MIMEText" , "email.mime.text" ) , MovedModule ( "email_mime_base" , "email.MIMEBase" , "email.mime.base" ) , MovedModule ( "BaseHTTPServer" , "BaseHTTPServer" , "http.server" ) , MovedModule ( "CGIHTTPServer" , "CGIHTTPServer" , "http.server" ) , MovedModule ( "SimpleHTTPServer" , "SimpleHTTPServer" , "http.server" ) , MovedModule ( "cPickle" , "cPickle" , "pickle" ) , MovedModule ( "queue" , "Queue" ) , MovedModule ( "reprlib" , "repr" ) , MovedModule ( "socketserver" , "SocketServer" ) , MovedModule ( "_thread" , "thread" , "_thread" ) , MovedModule ( "tkinter" , "Tkinter" ) , MovedModule ( "tkinter_dialog" , "Dialog" , "tkinter.dialog" ) , MovedModule ( "tkinter_filedialog" , "FileDialog" , "tkinter.filedialog" ) , MovedModule ( "tkinter_scrolledtext" , "ScrolledText" , "tkinter.scrolledtext" ) , MovedModule ( "tkinter_simpledialog" , "SimpleDialog" , "tkinter.simpledialog" ) , MovedModule ( "tkinter_tix" , "Tix" , "tkinter.tix" ) , MovedModule ( "tkinter_ttk" , "ttk" , "tkinter.ttk" ) , MovedModule ( "tkinter_constants" , "Tkconstants" , "tkinter.constants" ) , MovedModule ( "tkinter_dnd" , "Tkdnd" , "tkinter.dnd" ) , MovedModule ( "tkinter_colorchooser" , "tkColorChooser" , "tkinter.colorchooser" ) , MovedModule ( "tkinter_commondialog" , "tkCommonDialog" , "tkinter.commondialog" ) , MovedModule ( "tkinter_tkfiledialog" , "tkFileDialog" , "tkinter.filedialog" ) , MovedModule ( "tkinter_font" , "tkFont" , "tkinter.font" ) , MovedModule ( "tkinter_messagebox" , "tkMessageBox" , "tkinter.messagebox" ) , MovedModule ( "tkinter_tksimpledialog" , "tkSimpleDialog" , "tkinter.simpledialog" ) , MovedModule ( "urllib_parse" , __name__ + ".moves.urllib_parse" , "urllib.parse" ) , MovedModule ( "urllib_error" , __name__ + ".moves.urllib_error" , "urllib.error" ) , MovedModule ( "urllib" , __name__ + ".moves.urllib" , __name__ + ".moves.urllib" ) , MovedModule ( "urllib_robotparser" , "robotparser" , "urllib.robotparser" ) , MovedModule ( "xmlrpc_client" , "xmlrpclib" , "xmlrpc.client" ) , MovedModule ( "xmlrpc_server" , "xmlrpclib" , "xmlrpc.server" ) , MovedModule ( "winreg" , "_winreg" ) , ]
for attr in _moved_attributes :
setattr ( _MovedItems , attr . name , attr )
if isinstance ( attr , MovedModule ) :
sys . modules [ __name__ + ".moves." + attr . name ] = attr
del attr
_MovedItems . _moved_attributes = _moved_attributes
moves = sys . modules [ __name__ + ".moves" ] = _MovedItems ( __name__ + ".moves" )
class Module_six_moves_urllib_parse ( _LazyModule ) :
_urllib_parse_moved_attributes = [ MovedAttribute ( "ParseResult" , "urlparse" , "urllib.parse" ) , MovedAttribute ( "SplitResult" , "urlparse" , "urllib.parse" ) , MovedAttribute ( "parse_qs" , "urlparse" , "urllib.parse" ) , MovedAttribute ( "parse_qsl" , "urlparse" , "urllib.parse" ) , MovedAttribute ( "urldefrag" , "urlparse" , "urllib.parse" ) , MovedAttribute ( "urljoin" , "urlparse" , "urllib.parse" ) , MovedAttribute ( "urlparse" , "urlparse" , "urllib.parse" ) , MovedAttribute ( "urlsplit" , "urlparse" , "urllib.parse" ) , MovedAttribute ( "urlunparse" , "urlparse" , "urllib.parse" ) , MovedAttribute ( "urlunsplit" , "urlparse" , "urllib.parse" ) , MovedAttribute ( "quote" , "urllib" , "urllib.parse" ) , MovedAttribute ( "quote_plus" , "urllib" , "urllib.parse" ) , MovedAttribute ( "unquote" , "urllib" , "urllib.parse" ) , MovedAttribute ( "unquote_plus" , "urllib" , "urllib.parse" ) , MovedAttribute ( "urlencode" , "urllib" , "urllib.parse" ) , MovedAttribute ( "splitquery" , "urllib" , "urllib.parse" ) , ]
for attr in _urllib_parse_moved_attributes :
setattr ( Module_six_moves_urllib_parse , attr . name , attr )
del attr
Module_six_moves_urllib_parse . _moved_attributes = _urllib_parse_moved_attributes
sys . modules [ __name__ + ".moves.urllib_parse" ] = sys . modules [ __name__ + ".moves.urllib.parse" ] = Module_six_moves_urllib_parse ( __name__ + ".moves.urllib_parse" )
class Module_six_moves_urllib_error ( _LazyModule ) :
_urllib_error_moved_attributes = [ MovedAttribute ( "URLError" , "urllib2" , "urllib.error" ) , MovedAttribute ( "HTTPError" , "urllib2" , "urllib.error" ) , MovedAttribute ( "ContentTooShortError" , "urllib" , "urllib.error" ) , ]
for attr in _urllib_error_moved_attributes :
setattr ( Module_six_moves_urllib_error , attr . name , attr )
del attr
Module_six_moves_urllib_error . _moved_attributes = _urllib_error_moved_attributes
sys . modules [ __name__ + ".moves.urllib_error" ] = sys . modules [ __name__ + ".moves.urllib.error" ] = Module_six_moves_urllib_error ( __name__ + ".moves.urllib.error" )
class Module_six_moves_urllib_request ( _LazyModule ) :
_urllib_request_moved_attributes = [ MovedAttribute ( "urlopen" , "urllib2" , "urllib.request" ) , MovedAttribute ( "install_opener" , "urllib2" , "urllib.request" ) , MovedAttribute ( "build_opener" , "urllib2" , "urllib.request" ) , MovedAttribute ( "pathname2url" , "urllib" , "urllib.request" ) , MovedAttribute ( "url2pathname" , "urllib" , "urllib.request" ) , MovedAttribute ( "getproxies" , "urllib" , "urllib.request" ) , MovedAttribute ( "Request" , "urllib2" , "urllib.request" ) , MovedAttribute ( "OpenerDirector" , "urllib2" , "urllib.request" ) , MovedAttribute ( "HTTPDefaultErrorHandler" , "urllib2" , "urllib.request" ) , MovedAttribute ( "HTTPRedirectHandler" , "urllib2" , "urllib.request" ) , MovedAttribute ( "HTTPCookieProcessor" , "urllib2" , "urllib.request" ) , MovedAttribute ( "ProxyHandler" , "urllib2" , "urllib.request" ) , MovedAttribute ( "BaseHandler" , "urllib2" , "urllib.request" ) , MovedAttribute ( "HTTPPasswordMgr" , "urllib2" , "urllib.request" ) , MovedAttribute ( "HTTPPasswordMgrWithDefaultRealm" , "urllib2" , "urllib.request" ) , MovedAttribute ( "AbstractBasicAuthHandler" , "urllib2" , "urllib.request" ) , MovedAttribute ( "HTTPBasicAuthHandler" , "urllib2" , "urllib.request" ) , MovedAttribute ( "ProxyBasicAuthHandler" , "urllib2" , "urllib.request" ) , MovedAttribute ( "AbstractDigestAuthHandler" , "urllib2" , "urllib.request" ) , MovedAttribute ( "HTTPDigestAuthHandler" , "urllib2" , "urllib.request" ) , MovedAttribute ( "ProxyDigestAuthHandler" , "urllib2" , "urllib.request" ) , MovedAttribute ( "HTTPHandler" , "urllib2" , "urllib.request" ) , MovedAttribute ( "HTTPSHandler" , "urllib2" , "urllib.request" ) , MovedAttribute ( "FileHandler" , "urllib2" , "urllib.request" ) , MovedAttribute ( "FTPHandler" , "urllib2" , "urllib.request" ) , MovedAttribute ( "CacheFTPHandler" , "urllib2" , "urllib.request" ) , MovedAttribute ( "UnknownHandler" , "urllib2" , "urllib.request" ) , MovedAttribute ( "HTTPErrorProcessor" , "urllib2" , "urllib.request" ) , MovedAttribute ( "urlretrieve" , "urllib" , "urllib.request" ) , MovedAttribute ( "urlcleanup" , "urllib" , "urllib.request" ) , MovedAttribute ( "URLopener" , "urllib" , "urllib.request" ) , MovedAttribute ( "FancyURLopener" , "urllib" , "urllib.request" ) , MovedAttribute ( "proxy_bypass" , "urllib" , "urllib.request" ) , ]
for attr in _urllib_request_moved_attributes :
setattr ( Module_six_moves_urllib_request , attr . name , attr )
del attr
Module_six_moves_urllib_request . _moved_attributes = _urllib_request_moved_attributes
sys . modules [ __name__ + ".moves.urllib_request" ] = sys . modules [ __name__ + ".moves.urllib.request" ] = Module_six_moves_urllib_request ( __name__ + ".moves.urllib.request" )
class Module_six_moves_urllib_response ( _LazyModule ) :
_urllib_response_moved_attributes = [ MovedAttribute ( "addbase" , "urllib" , "urllib.response" ) , MovedAttribute ( "addclosehook" , "urllib" , "urllib.response" ) , MovedAttribute ( "addinfo" , "urllib" , "urllib.response" ) , MovedAttribute ( "addinfourl" , "urllib" , "urllib.response" ) , ]
for attr in _urllib_response_moved_attributes :
setattr ( Module_six_moves_urllib_response , attr . name , attr )
del attr
Module_six_moves_urllib_response . _moved_attributes = _urllib_response_moved_attributes
sys . modules [ __name__ + ".moves.urllib_response" ] = sys . modules [ __name__ + ".moves.urllib.response" ] = Module_six_moves_urllib_response ( __name__ + ".moves.urllib.response" )
class Module_six_moves_urllib_robotparser ( _LazyModule ) :
_urllib_robotparser_moved_attributes = [ MovedAttribute ( "RobotFileParser" , "robotparser" , "urllib.robotparser" ) , ]
for attr in _urllib_robotparser_moved_attributes :
setattr ( Module_six_moves_urllib_robotparser , attr . name , attr )
del attr
Module_six_moves_urllib_robotparser . _moved_attributes = _urllib_robotparser_moved_attributes
sys . modules [ __name__ + ".moves.urllib_robotparser" ] = sys . modules [ __name__ + ".moves.urllib.robotparser" ] = Module_six_moves_urllib_robotparser ( __name__ + ".moves.urllib.robotparser" )
class Module_six_moves_urllib ( types . ModuleType ) :
parse = sys . modules [ __name__ + ".moves.urllib_parse" ]
error = sys . modules [ __name__ + ".moves.urllib_error" ]
request = sys . modules [ __name__ + ".moves.urllib_request" ]
response = sys . modules [ __name__ + ".moves.urllib_response" ]
robotparser = sys . modules [ __name__ + ".moves.urllib_robotparser" ]
def __dir__ ( self ) :
return [ 'parse' , 'error' , 'request' , 'response' , 'robotparser' ]
sys . modules [ __name__ + ".moves.urllib" ] = Module_six_moves_urllib ( __name__ + ".moves.urllib" )
def add_move ( move ) :
setattr ( _MovedItems , move . name , move )
def remove_move ( name ) :
try :
delattr ( _MovedItems , name )
except AttributeError :
try :
del moves . __dict__ [ name ]
except KeyError :
raise AttributeError ( "no such move, %r" % ( name , ) )
if PY3 :
_meth_func = "__func__"
_meth_self = "__self__"
_func_closure = "__closure__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
_iterkeys = "keys"
_itervalues = "values"
_iteritems = "items"
_iterlists = "lists"
else :
_meth_func = "im_func"
_meth_self = "im_self"
_func_closure = "func_closure"
_func_code = "func_code"
_func_defaults = "func_defaults"
_func_globals = "func_globals"
_iterkeys = "iterkeys"
_itervalues = "itervalues"
_iteritems = "iteritems"
_iterlists = "iterlists"
try :
advance_iterator = next
except NameError :
def advance_iterator ( it ) :
return it . next ( )
next = advance_iterator
try :
callable = callable
except NameError :
def callable ( obj ) :
return any ( "__call__" in klass . __dict__ for klass in type ( obj ) . __mro__ )
if PY3 :
def get_unbound_function ( unbound ) :
return unbound
create_bound_method = types . MethodType
Iterator = object
else :
def get_unbound_function ( unbound ) :
return unbound . im_func
def create_bound_method ( func , obj ) :
return types . MethodType ( func , obj , obj . __class__ )
class Iterator ( object ) :
def next ( self ) :
return type ( self ) . __next__ ( self )
callable = callable
_add_doc ( get_unbound_function , """Get the function out of a possibly unbound function""" )
get_method_function = operator . attrgetter ( _meth_func )
get_method_self = operator . attrgetter ( _meth_self )
get_function_closure = operator . attrgetter ( _func_closure )
get_function_code = operator . attrgetter ( _func_code )
get_function_defaults = operator . attrgetter ( _func_defaults )
get_function_globals = operator . attrgetter ( _func_globals )
def iterkeys ( d , ** kw ) :
return iter ( getattr ( d , _iterkeys ) ( ** kw ) )
def itervalues ( d , ** kw ) :
return iter ( getattr ( d , _itervalues ) ( ** kw ) )
def iteritems ( d , ** kw ) :
return iter ( getattr ( d , _iteritems ) ( ** kw ) )
def iterlists ( d , ** kw ) :
return iter ( getattr ( d , _iterlists ) ( ** kw ) )
if PY3 :
def b ( s ) :
return s . encode ( "latin-1" )
def u ( s ) :
return s
unichr = chr
if sys . version_info [ 1 ] <= 1 :
def int2byte ( i ) :
return bytes ( ( i , ) )
else :
int2byte = operator . methodcaller ( "to_bytes" , 1 , "big" )
byte2int = operator . itemgetter ( 0 )
indexbytes = operator . getitem
iterbytes = iter
import io
StringIO = io . StringIO
BytesIO = io . BytesIO
else :
def b ( s ) :
return s
def u ( s ) :
return unicode ( s . replace ( r'\\' , r'\\\\' ) , "unicode_escape" )
unichr = unichr
int2byte = chr
def byte2int ( bs ) :
return ord ( bs [ 0 ] )
def indexbytes ( buf , i ) :
return ord ( buf [ i ] )
def iterbytes ( buf ) :
return ( ord ( byte ) for byte in buf )
import StringIO
StringIO = BytesIO = StringIO . StringIO
_add_doc ( b , """Byte literal""" )
_add_doc ( u , """Text literal""" )
if PY3 :
exec_ = getattr ( moves . builtins , "exec" )
def reraise ( tp , value , tb = None ) :
if value . __traceback__ is not tb :
raise value . with_traceback ( tb )
raise value
else :
def exec_ ( _code_ , _globs_ = None , _locs_ = None ) :
if _globs_ is None :
frame = sys . _getframe ( 1 )
_globs_ = frame . f_globals
if _locs_ is None :
_locs_ = frame . f_locals
del frame
elif _locs_ is None :
_locs_ = _globs_
exec ( """exec _code_ in _globs_, _locs_""" )
exec_ ( """def reraise(tp, value, tb=None): raise tp, value, tb """ )
print_ = getattr ( moves . builtins , "print" , None )
if print_ is None :
def print_ ( * args , ** kwargs ) :
fp = kwargs . pop ( "file" , sys . stdout )
if fp is None :
return
def write ( data ) :
if not isinstance ( data , basestring ) :
data = str ( data )
if ( isinstance ( fp , file ) and isinstance ( data , unicode ) and fp . encoding is not None ) :
errors = getattr ( fp , "errors" , None )
if errors is None :
errors = "strict"
data = data . encode ( fp . encoding , errors )
fp . write ( data )
want_unicode = False
sep = kwargs . pop ( "sep" , None )
if sep is not None :
if isinstance ( sep , unicode ) :
want_unicode = True
elif not isinstance ( sep , str ) :
raise TypeError ( "sep must be None or a string" )
end = kwargs . pop ( "end" , None )
if end is not None :
if isinstance ( end , unicode ) :
want_unicode = True
elif not isinstance ( end , str ) :
raise TypeError ( "end must be None or a string" )
if kwargs :
raise TypeError ( "invalid keyword arguments to print()" )
if not want_unicode :
for arg in args :
if isinstance ( arg , unicode ) :
want_unicode = True
break
if want_unicode :
newline = unicode ( "\n" )
space = unicode ( " " )
else :
newline = "\n"
space = " "
if sep is None :
sep = space
if end is None :
end = newline
for i , arg in enumerate ( args ) :
if i :
write ( sep )
write ( arg )
write ( end )
_add_doc ( reraise , """Reraise an exception.""" )
def with_metaclass ( meta , * bases ) :
class metaclass ( meta ) :
__call__ = type . __call__
__init__ = type . __init__
def __new__ ( cls , name , this_bases , d ) :
if this_bases is None :
return type . __new__ ( cls , name , ( ) , d )
return meta ( name , bases , d )
return metaclass ( 'temporary_class' , None , { } )
def add_metaclass ( metaclass ) :
def wrapper ( cls ) :
orig_vars = cls . __dict__ . copy ( )
orig_vars . pop ( '__dict__' , None )
orig_vars . pop ( '__weakref__' , None )
slots = orig_vars . get ( '__slots__' )
if slots is not None :
if isinstance ( slots , str ) :
slots = [ slots ]
for slots_var in slots :
orig_vars . pop ( slots_var )
return metaclass ( cls . __name__ , cls . __bases__ , orig_vars )
return wrapper
if PY3 :
_assertCountEqual = "assertCountEqual"
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
memoryview = memoryview
buffer_types = ( bytes , bytearray , memoryview )
else :
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
if sys . platform . startswith ( 'java' ) :
memoryview = memoryview
else :
memoryview = buffer
buffer_types = ( bytearray , memoryview )
def assertCountEqual ( self , * args , ** kwargs ) :
return getattr ( self , _assertCountEqual ) ( * args , ** kwargs )
def assertRaisesRegex ( self , * args , ** kwargs ) :
return getattr ( self , _assertRaisesRegex ) ( * args , ** kwargs )
def assertRegex ( self , * args , ** kwargs ) :
return getattr ( self , _assertRegex ) ( * args , ** kwargs )
add_move ( MovedModule ( "_dummy_thread" , "dummy_thread" ) )
add_move ( MovedModule ( "_thread" , "thread" ) )
import contextlib
try :
import threading
except ImportError :
import dummy_threading as threading
class RWLock ( object ) :
def __init__ ( self ) :
self . mutex = threading . RLock ( )
self . can_read = threading . Semaphore ( 0 )
self . can_write = threading . Semaphore ( 0 )
self . active_readers = 0
self . active_writers = 0
self . waiting_readers = 0
self . waiting_writers = 0
def reader_enters ( self ) :
with self . mutex :
if self . active_writers == 0 and self . waiting_writers == 0 :
self . active_readers += 1
self . can_read . release ( )
else :
self . waiting_readers += 1
self . can_read . acquire ( )
def reader_leaves ( self ) :
with self . mutex :
self . active_readers -= 1
if self . active_readers == 0 and self . waiting_writers != 0 :
self . active_writers += 1
self . waiting_writers -= 1
self . can_write . release ( )
@ contextlib . contextmanager
def reader ( self ) :
self . reader_enters ( )
try :
yield
finally :
self . reader_leaves ( )
def writer_enters ( self ) :
with self . mutex :
if self . active_writers == 0 and self . waiting_writers == 0 and self . active_readers == 0 :
self . active_writers += 1
self . can_write . release ( )
else :
self . waiting_writers += 1
self . can_write . acquire ( )
def writer_leaves ( self ) :
with self . mutex :
self . active_writers -= 1
if self . waiting_writers != 0 :
self . active_writers += 1
self . waiting_writers -= 1
self . can_write . release ( )
elif self . waiting_readers != 0 :
t = self . waiting_readers
self . waiting_readers = 0
self . active_readers += t
while t > 0 :
self . can_read . release ( )
t -= 1
@ contextlib . contextmanager
def writer ( self ) :
self . writer_enters ( )
try :
yield
finally :
self . writer_leaves ( )
from django . utils import six
color_names = ( 'black' , 'red' , 'green' , 'yellow' , 'blue' , 'magenta' , 'cyan' , 'white' )
foreground = dict ( ( color_names [ x ] , '3%s' % x ) for x in range ( 8 ) )
background = dict ( ( color_names [ x ] , '4%s' % x ) for x in range ( 8 ) )
RESET = '0'
opt_dict = { 'bold' : '1' , 'underscore' : '4' , 'blink' : '5' , 'reverse' : '7' , 'conceal' : '8' }
def colorize ( text = '' , opts = ( ) , ** kwargs ) :
code_list = [ ]
if text == '' and len ( opts ) == 1 and opts [ 0 ] == 'reset' :
return '\x1b[%sm' % RESET
for k , v in six . iteritems ( kwargs ) :
if k == 'fg' :
code_list . append ( foreground [ v ] )
elif k == 'bg' :
code_list . append ( background [ v ] )
for o in opts :
if o in opt_dict :
code_list . append ( opt_dict [ o ] )
if 'noreset' not in opts :
text = '%s\x1b[%sm' % ( text or '' , RESET )
return '%s%s' % ( ( '\x1b[%sm' % ';' . join ( code_list ) ) , text or '' )
def make_style ( opts = ( ) , ** kwargs ) :
return lambda text : colorize ( text , opts , ** kwargs )
NOCOLOR_PALETTE = 'nocolor'
DARK_PALETTE = 'dark'
LIGHT_PALETTE = 'light'
PALETTES = { NOCOLOR_PALETTE : { 'ERROR' : { } , 'WARNING' : { } , 'NOTICE' : { } , 'SQL_FIELD' : { } , 'SQL_COLTYPE' : { } , 'SQL_KEYWORD' : { } , 'SQL_TABLE' : { } , 'HTTP_INFO' : { } , 'HTTP_SUCCESS' : { } , 'HTTP_REDIRECT' : { } , 'HTTP_NOT_MODIFIED' : { } , 'HTTP_BAD_REQUEST' : { } , 'HTTP_NOT_FOUND' : { } , 'HTTP_SERVER_ERROR' : { } , 'MIGRATE_HEADING' : { } , 'MIGRATE_LABEL' : { } , 'MIGRATE_SUCCESS' : { } , 'MIGRATE_FAILURE' : { } , } , DARK_PALETTE : { 'ERROR' : { 'fg' : 'red' , 'opts' : ( 'bold' , ) } , 'WARNING' : { 'fg' : 'yellow' , 'opts' : ( 'bold' , ) } , 'NOTICE' : { 'fg' : 'red' } , 'SQL_FIELD' : { 'fg' : 'green' , 'opts' : ( 'bold' , ) } , 'SQL_COLTYPE' : { 'fg' : 'green' } , 'SQL_KEYWORD' : { 'fg' : 'yellow' } , 'SQL_TABLE' : { 'opts' : ( 'bold' , ) } , 'HTTP_INFO' : { 'opts' : ( 'bold' , ) } , 'HTTP_SUCCESS' : { } , 'HTTP_REDIRECT' : { 'fg' : 'green' } , 'HTTP_NOT_MODIFIED' : { 'fg' : 'cyan' } , 'HTTP_BAD_REQUEST' : { 'fg' : 'red' , 'opts' : ( 'bold' , ) } , 'HTTP_NOT_FOUND' : { 'fg' : 'yellow' } , 'HTTP_SERVER_ERROR' : { 'fg' : 'magenta' , 'opts' : ( 'bold' , ) } , 'MIGRATE_HEADING' : { 'fg' : 'cyan' , 'opts' : ( 'bold' , ) } , 'MIGRATE_LABEL' : { 'opts' : ( 'bold' , ) } , 'MIGRATE_SUCCESS' : { 'fg' : 'green' , 'opts' : ( 'bold' , ) } , 'MIGRATE_FAILURE' : { 'fg' : 'red' , 'opts' : ( 'bold' , ) } , } , LIGHT_PALETTE : { 'ERROR' : { 'fg' : 'red' , 'opts' : ( 'bold' , ) } , 'WARNING' : { 'fg' : 'yellow' , 'opts' : ( 'bold' , ) } , 'NOTICE' : { 'fg' : 'red' } , 'SQL_FIELD' : { 'fg' : 'green' , 'opts' : ( 'bold' , ) } , 'SQL_COLTYPE' : { 'fg' : 'green' } , 'SQL_KEYWORD' : { 'fg' : 'blue' } , 'SQL_TABLE' : { 'opts' : ( 'bold' , ) } , 'HTTP_INFO' : { 'opts' : ( 'bold' , ) } , 'HTTP_SUCCESS' : { } , 'HTTP_REDIRECT' : { 'fg' : 'green' , 'opts' : ( 'bold' , ) } , 'HTTP_NOT_MODIFIED' : { 'fg' : 'green' } , 'HTTP_BAD_REQUEST' : { 'fg' : 'red' , 'opts' : ( 'bold' , ) } , 'HTTP_NOT_FOUND' : { 'fg' : 'red' } , 'HTTP_SERVER_ERROR' : { 'fg' : 'magenta' , 'opts' : ( 'bold' , ) } , 'MIGRATE_HEADING' : { 'fg' : 'cyan' , 'opts' : ( 'bold' , ) } , 'MIGRATE_LABEL' : { 'opts' : ( 'bold' , ) } , 'MIGRATE_SUCCESS' : { 'fg' : 'green' , 'opts' : ( 'bold' , ) } , 'MIGRATE_FAILURE' : { 'fg' : 'red' , 'opts' : ( 'bold' , ) } , } }
DEFAULT_PALETTE = DARK_PALETTE
def parse_color_setting ( config_string ) :
if not config_string :
return PALETTES [ DEFAULT_PALETTE ]
parts = config_string . lower ( ) . split ( ';' )
palette = PALETTES [ NOCOLOR_PALETTE ] . copy ( )
for part in parts :
if part in PALETTES :
palette . update ( PALETTES [ part ] )
elif '=' in part :
definition = { }
role , instructions = part . split ( '=' )
role = role . upper ( )
styles = instructions . split ( ',' )
styles . reverse ( )
colors = styles . pop ( ) . split ( '/' )
colors . reverse ( )
fg = colors . pop ( )
if fg in color_names :
definition [ 'fg' ] = fg
if colors and colors [ - 1 ] in color_names :
definition [ 'bg' ] = colors [ - 1 ]
opts = tuple ( s for s in styles if s in opt_dict . keys ( ) )
if opts :
definition [ 'opts' ] = opts
if role in PALETTES [ NOCOLOR_PALETTE ] and definition :
palette [ role ] = definition
if palette == PALETTES [ NOCOLOR_PALETTE ] :
return None
return palette
from __future__ import unicode_literals
import re
import unicodedata
from gzip import GzipFile
from io import BytesIO
import warnings
from django . utils . deprecation import RemovedInDjango19Warning
from django . utils . encoding import force_text
from django . utils . functional import allow_lazy , SimpleLazyObject
from django . utils import six
from django . utils . six . moves import html_entities
from django . utils . translation import ugettext_lazy , ugettext as _ , pgettext
from django . utils . safestring import mark_safe
if six . PY2 :
from django . utils . encoding import force_unicode
capfirst = lambda x : x and force_text ( x ) [ 0 ] . upper ( ) + force_text ( x ) [ 1 : ]
capfirst = allow_lazy ( capfirst , six . text_type )
re_words = re . compile ( r'<.*?>|((?:\w[-\w]*|&.*?;)+)' , re . U | re . S )
re_chars = re . compile ( r'<.*?>|(.)' , re . U | re . S )
re_tag = re . compile ( r'<(/)?([^ ]+?)(?:(\s*/)| .*?)?>' , re . S )
re_newlines = re . compile ( r'\r\n|\r' )
re_camel_case = re . compile ( r'(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))' )
def wrap ( text , width ) :
text = force_text ( text )
def _generator ( ) :
for line in text . splitlines ( True ) :
max_width = min ( ( line . endswith ( '\n' ) and width + 1 or width ) , width )
while len ( line ) > max_width :
space = line [ : max_width + 1 ] . rfind ( ' ' ) + 1
if space == 0 :
space = line . find ( ' ' ) + 1
if space == 0 :
yield line
line = ''
break
yield '%s\n' % line [ : space - 1 ]
line = line [ space : ]
max_width = min ( ( line . endswith ( '\n' ) and width + 1 or width ) , width )
if line :
yield line
return '' . join ( _generator ( ) )
wrap = allow_lazy ( wrap , six . text_type )
class Truncator ( SimpleLazyObject ) :
def __init__ ( self , text ) :
super ( Truncator , self ) . __init__ ( lambda : force_text ( text ) )
def add_truncation_text ( self , text , truncate = None ) :
if truncate is None :
truncate = pgettext ( 'String to return when truncating text' , '%(truncated_text)s...' )
truncate = force_text ( truncate )
if '%(truncated_text)s' in truncate :
return truncate % { 'truncated_text' : text }
if text . endswith ( truncate ) :
return text
return '%s%s' % ( text , truncate )
def chars ( self , num , truncate = None , html = False ) :
length = int ( num )
text = unicodedata . normalize ( 'NFC' , self . _wrapped )
truncate_len = length
for char in self . add_truncation_text ( '' , truncate ) :
if not unicodedata . combining ( char ) :
truncate_len -= 1
if truncate_len == 0 :
break
if html :
return self . _truncate_html ( length , truncate , text , truncate_len , False )
return self . _text_chars ( length , truncate , text , truncate_len )
chars = allow_lazy ( chars )
def _text_chars ( self , length , truncate , text , truncate_len ) :
s_len = 0
end_index = None
for i , char in enumerate ( text ) :
if unicodedata . combining ( char ) :
continue
s_len += 1
if end_index is None and s_len > truncate_len :
end_index = i
if s_len > length :
return self . add_truncation_text ( text [ : end_index or 0 ] , truncate )
return text
def words ( self , num , truncate = None , html = False ) :
length = int ( num )
if html :
return self . _truncate_html ( length , truncate , self . _wrapped , length , True )
return self . _text_words ( length , truncate )
words = allow_lazy ( words )
def _text_words ( self , length , truncate ) :
words = self . _wrapped . split ( )
if len ( words ) > length :
words = words [ : length ]
return self . add_truncation_text ( ' ' . join ( words ) , truncate )
return ' ' . join ( words )
def _truncate_html ( self , length , truncate , text , truncate_len , words ) :
if words and length <= 0 :
return ''
html4_singlets = ( 'br' , 'col' , 'link' , 'base' , 'img' , 'param' , 'area' , 'hr' , 'input' )
pos = 0
end_text_pos = 0
current_len = 0
open_tags = [ ]
regex = re_words if words else re_chars
while current_len <= length :
m = regex . search ( text , pos )
if not m :
break
pos = m . end ( 0 )
if m . group ( 1 ) :
current_len += 1
if current_len == truncate_len :
end_text_pos = pos
continue
tag = re_tag . match ( m . group ( 0 ) )
if not tag or current_len >= truncate_len :
continue
closing_tag , tagname , self_closing = tag . groups ( )
tagname = tagname . lower ( )
if self_closing or tagname in html4_singlets :
pass
elif closing_tag :
try :
i = open_tags . index ( tagname )
except ValueError :
pass
else :
open_tags = open_tags [ i + 1 : ]
else :
open_tags . insert ( 0 , tagname )
if current_len <= length :
return text
out = text [ : end_text_pos ]
truncate_text = self . add_truncation_text ( '' , truncate )
if truncate_text :
out += truncate_text
for tag in open_tags :
out += '%s>' % tag
return out
def get_valid_filename ( s ) :
s = force_text ( s ) . strip ( ) . replace ( ' ' , '_' )
return re . sub ( r'(?u)[^-\w.]' , '' , s )
get_valid_filename = allow_lazy ( get_valid_filename , six . text_type )
def get_text_list ( list_ , last_word = ugettext_lazy ( 'or' ) ) :
if len ( list_ ) == 0 :
return ''
if len ( list_ ) == 1 :
return force_text ( list_ [ 0 ] )
return '%s %s %s' % ( _ ( ', ' ) . join ( force_text ( i ) for i in list_ [ : - 1 ] ) , force_text ( last_word ) , force_text ( list_ [ - 1 ] ) )
get_text_list = allow_lazy ( get_text_list , six . text_type )
def normalize_newlines ( text ) :
text = force_text ( text )
return re_newlines . sub ( '\n' , text )
normalize_newlines = allow_lazy ( normalize_newlines , six . text_type )
def phone2numeric ( phone ) :
char2number = { 'a' : '2' , 'b' : '2' , 'c' : '2' , 'd' : '3' , 'e' : '3' , 'f' : '3' , 'g' : '4' , 'h' : '4' , 'i' : '4' , 'j' : '5' , 'k' : '5' , 'l' : '5' , 'm' : '6' , 'n' : '6' , 'o' : '6' , 'p' : '7' , 'q' : '7' , 'r' : '7' , 's' : '7' , 't' : '8' , 'u' : '8' , 'v' : '8' , 'w' : '9' , 'x' : '9' , 'y' : '9' , 'z' : '9' }
return '' . join ( char2number . get ( c , c ) for c in phone . lower ( ) )
phone2numeric = allow_lazy ( phone2numeric )
def compress_string ( s ) :
zbuf = BytesIO ( )
zfile = GzipFile ( mode = 'wb' , compresslevel = 6 , fileobj = zbuf )
zfile . write ( s )
zfile . close ( )
return zbuf . getvalue ( )
class StreamingBuffer ( object ) :
def __init__ ( self ) :
self . vals = [ ]
def write ( self , val ) :
self . vals . append ( val )
def read ( self ) :
ret = b'' . join ( self . vals )
self . vals = [ ]
return ret
def flush ( self ) :
return
def close ( self ) :
return
def compress_sequence ( sequence ) :
buf = StreamingBuffer ( )
zfile = GzipFile ( mode = 'wb' , compresslevel = 6 , fileobj = buf )
yield buf . read ( )
for item in sequence :
zfile . write ( item )
zfile . flush ( )
yield buf . read ( )
zfile . close ( )
yield buf . read ( )
ustring_re = re . compile ( "([\u0080-\uffff])" )
def javascript_quote ( s , quote_double_quotes = False ) :
msg = ( "django.utils.text.javascript_quote() is deprecated. " "Use django.utils.html.escapejs() instead." )
warnings . warn ( msg , RemovedInDjango19Warning , stacklevel = 2 )
def fix ( match ) :
return "\\u%04x" % ord ( match . group ( 1 ) )
if type ( s ) == bytes :
s = s . decode ( 'utf-8' )
elif type ( s ) != six . text_type :
raise TypeError ( s )
s = s . replace ( '\\' , '\\\\' )
s = s . replace ( '\r' , '\\r' )
s = s . replace ( '\n' , '\\n' )
s = s . replace ( '\t' , '\\t' )
s = s . replace ( "'" , "\\'" )
s = s . replace ( '' , '<\\/' )
if quote_double_quotes :
s = s . replace ( '"' , '"' )
return ustring_re . sub ( fix , s )
javascript_quote = allow_lazy ( javascript_quote , six . text_type )
smart_split_re = re . compile ( r""" ((?: [^\s'"]* (?: (?:"(?:[^"\\]|\\.)*" | '(?:[^'\\]|\\.)*') [^\s'"]* )+ ) | \S+) """ , re . VERBOSE )
def smart_split ( text ) :
text = force_text ( text )
for bit in smart_split_re . finditer ( text ) :
yield bit . group ( 0 )
def _replace_entity ( match ) :
text = match . group ( 1 )
if text [ 0 ] == '#' :
text = text [ 1 : ]
try :
if text [ 0 ] in 'xX' :
c = int ( text [ 1 : ] , 16 )
else :
c = int ( text )
return six . unichr ( c )
except ValueError :
return match . group ( 0 )
else :
try :
return six . unichr ( html_entities . name2codepoint [ text ] )
except ( ValueError , KeyError ) :
return match . group ( 0 )
_entity_re = re . compile ( r"&(#?[xX]?(?:[0-9a-fA-F]+|\w{1,8}));" )
def unescape_entities ( text ) :
return _entity_re . sub ( _replace_entity , text )
unescape_entities = allow_lazy ( unescape_entities , six . text_type )
def unescape_string_literal ( s ) :
if s [ 0 ] not in "\"'" or s [ - 1 ] != s [ 0 ] :
raise ValueError ( "Not a string literal: %r" % s )
quote = s [ 0 ]
return s [ 1 : - 1 ] . replace ( r'\%s' % quote , quote ) . replace ( r'\\' , '\\' )
unescape_string_literal = allow_lazy ( unescape_string_literal )
def slugify ( value ) :
value = unicodedata . normalize ( 'NFKD' , value ) . encode ( 'ascii' , 'ignore' ) . decode ( 'ascii' )
value = re . sub ( '[^\w\s-]' , '' , value ) . strip ( ) . lower ( )
return mark_safe ( re . sub ( '[-\s]+' , '-' , value ) )
slugify = allow_lazy ( slugify , six . text_type )
def camel_case_to_spaces ( value ) :
return re_camel_case . sub ( r' \1' , value ) . strip ( ) . lower ( )
from __future__ import unicode_literals
import datetime
from django . utils . html import avoid_wrapping
from django . utils . timezone import is_aware , utc
from django . utils . translation import ugettext , ungettext_lazy
def timesince ( d , now = None , reversed = False ) :
chunks = ( ( 60 * 60 * 24 * 365 , ungettext_lazy ( '%d year' , '%d years' ) ) , ( 60 * 60 * 24 * 30 , ungettext_lazy ( '%d month' , '%d months' ) ) , ( 60 * 60 * 24 * 7 , ungettext_lazy ( '%d week' , '%d weeks' ) ) , ( 60 * 60 * 24 , ungettext_lazy ( '%d day' , '%d days' ) ) , ( 60 * 60 , ungettext_lazy ( '%d hour' , '%d hours' ) ) , ( 60 , ungettext_lazy ( '%d minute' , '%d minutes' ) ) )
if not isinstance ( d , datetime . datetime ) :
d = datetime . datetime ( d . year , d . month , d . day )
if now and not isinstance ( now , datetime . datetime ) :
now = datetime . datetime ( now . year , now . month , now . day )
if not now :
now = datetime . datetime . now ( utc if is_aware ( d ) else None )
delta = ( d - now ) if reversed else ( now - d )
since = delta . days * 24 * 60 * 60 + delta . seconds
if since <= 0 :
return avoid_wrapping ( ugettext ( '0 minutes' ) )
for i , ( seconds , name ) in enumerate ( chunks ) :
count = since // seconds
if count != 0 :
break
result = avoid_wrapping ( name % count )
if i + 1 < len ( chunks ) :
seconds2 , name2 = chunks [ i + 1 ]
count2 = ( since - ( seconds * count ) ) // seconds2
if count2 != 0 :
result += ugettext ( ', ' ) + avoid_wrapping ( name2 % count2 )
return result
def timeuntil ( d , now = None ) :
return timesince ( d , now , reversed = True )
from datetime import datetime , timedelta , tzinfo
from threading import local
import sys
import time as _time
try :
import pytz
except ImportError :
pytz = None
from django . conf import settings
from django . utils import six
__all__ = [ 'utc' , 'get_fixed_timezone' , 'get_default_timezone' , 'get_default_timezone_name' , 'get_current_timezone' , 'get_current_timezone_name' , 'activate' , 'deactivate' , 'override' , 'localtime' , 'now' , 'is_aware' , 'is_naive' , 'make_aware' , 'make_naive' , ]
ZERO = timedelta ( 0 )
class UTC ( tzinfo ) :
def __repr__ ( self ) :
return ""
def utcoffset ( self , dt ) :
return ZERO
def tzname ( self , dt ) :
return "UTC"
def dst ( self , dt ) :
return ZERO
class FixedOffset ( tzinfo ) :
def __init__ ( self , offset = None , name = None ) :
if offset is not None :
self . __offset = timedelta ( minutes = offset )
if name is not None :
self . __name = name
def utcoffset ( self , dt ) :
return self . __offset
def tzname ( self , dt ) :
return self . __name
def dst ( self , dt ) :
return ZERO
class ReferenceLocalTimezone ( tzinfo ) :
def __init__ ( self ) :
self . STDOFFSET = timedelta ( seconds = - _time . timezone )
if _time . daylight :
self . DSTOFFSET = timedelta ( seconds = - _time . altzone )
else :
self . DSTOFFSET = self . STDOFFSET
self . DSTDIFF = self . DSTOFFSET - self . STDOFFSET
tzinfo . __init__ ( self )
def utcoffset ( self , dt ) :
if self . _isdst ( dt ) :
return self . DSTOFFSET
else :
return self . STDOFFSET
def dst ( self , dt ) :
if self . _isdst ( dt ) :
return self . DSTDIFF
else :
return ZERO
def tzname ( self , dt ) :
return _time . tzname [ self . _isdst ( dt ) ]
def _isdst ( self , dt ) :
tt = ( dt . year , dt . month , dt . day , dt . hour , dt . minute , dt . second , dt . weekday ( ) , 0 , 0 )
stamp = _time . mktime ( tt )
tt = _time . localtime ( stamp )
return tt . tm_isdst > 0
class LocalTimezone ( ReferenceLocalTimezone ) :
def tzname ( self , dt ) :
is_dst = False if dt is None else self . _isdst ( dt )
return _time . tzname [ is_dst ]
def _isdst ( self , dt ) :
try :
return super ( LocalTimezone , self ) . _isdst ( dt )
except ( OverflowError , ValueError ) as exc :
exc_type = type ( exc )
exc_value = exc_type ( "Unsupported value: %r. You should install pytz." % dt )
exc_value . __cause__ = exc
six . reraise ( exc_type , exc_value , sys . exc_info ( ) [ 2 ] )
utc = pytz . utc if pytz else UTC ( )
def get_fixed_timezone ( offset ) :
if isinstance ( offset , timedelta ) :
offset = offset . seconds // 60
sign = '-' if offset < 0 else '+'
hhmm = '%02d%02d' % divmod ( abs ( offset ) , 60 )
name = sign + hhmm
return FixedOffset ( offset , name )
_localtime = None
def get_default_timezone ( ) :
global _localtime
if _localtime is None :
if isinstance ( settings . TIME_ZONE , six . string_types ) and pytz is not None :
_localtime = pytz . timezone ( settings . TIME_ZONE )
else :
_localtime = LocalTimezone ( )
return _localtime
def get_default_timezone_name ( ) :
return _get_timezone_name ( get_default_timezone ( ) )
_active = local ( )
def get_current_timezone ( ) :
return getattr ( _active , "value" , get_default_timezone ( ) )
def get_current_timezone_name ( ) :
return _get_timezone_name ( get_current_timezone ( ) )
def _get_timezone_name ( timezone ) :
try :
return timezone . zone
except AttributeError :
return timezone . tzname ( None )
def activate ( timezone ) :
if isinstance ( timezone , tzinfo ) :
_active . value = timezone
elif isinstance ( timezone , six . string_types ) and pytz is not None :
_active . value = pytz . timezone ( timezone )
else :
raise ValueError ( "Invalid timezone: %r" % timezone )
def deactivate ( ) :
if hasattr ( _active , "value" ) :
del _active . value
class override ( object ) :
def __init__ ( self , timezone ) :
self . timezone = timezone
self . old_timezone = getattr ( _active , 'value' , None )
def __enter__ ( self ) :
if self . timezone is None :
deactivate ( )
else :
activate ( self . timezone )
def __exit__ ( self , exc_type , exc_value , traceback ) :
if self . old_timezone is None :
deactivate ( )
else :
_active . value = self . old_timezone
def template_localtime ( value , use_tz = None ) :
should_convert = ( isinstance ( value , datetime ) and ( settings . USE_TZ if use_tz is None else use_tz ) and not is_naive ( value ) and getattr ( value , 'convert_to_local_time' , True ) )
return localtime ( value ) if should_convert else value
def localtime ( value , timezone = None ) :
if timezone is None :
timezone = get_current_timezone ( )
value = value . astimezone ( timezone )
if hasattr ( timezone , 'normalize' ) :
value = timezone . normalize ( value )
return value
def now ( ) :
if settings . USE_TZ :
return datetime . utcnow ( ) . replace ( tzinfo = utc )
else :
return datetime . now ( )
def is_aware ( value ) :
return value . tzinfo is not None and value . tzinfo . utcoffset ( value ) is not None
def is_naive ( value ) :
return value . tzinfo is None or value . tzinfo . utcoffset ( value ) is None
def make_aware ( value , timezone ) :
if hasattr ( timezone , 'localize' ) :
return timezone . localize ( value , is_dst = None )
else :
if is_aware ( value ) :
raise ValueError ( "make_aware expects a naive datetime, got %s" % value )
return value . replace ( tzinfo = timezone )
def make_naive ( value , timezone ) :
value = value . astimezone ( timezone )
if hasattr ( timezone , 'normalize' ) :
value = timezone . normalize ( value )
return value . replace ( tzinfo = None )
from __future__ import unicode_literals
import re
from django . utils . encoding import force_text
from django . utils . functional import lazy
from django . utils import six
__all__ = [ 'activate' , 'deactivate' , 'override' , 'deactivate_all' , 'get_language' , 'get_language_from_request' , 'get_language_info' , 'get_language_bidi' , 'check_for_language' , 'to_locale' , 'templatize' , 'string_concat' , 'gettext' , 'gettext_lazy' , 'gettext_noop' , 'ugettext' , 'ugettext_lazy' , 'ugettext_noop' , 'ngettext' , 'ngettext_lazy' , 'ungettext' , 'ungettext_lazy' , 'pgettext' , 'pgettext_lazy' , 'npgettext' , 'npgettext_lazy' , 'LANGUAGE_SESSION_KEY' , ]
LANGUAGE_SESSION_KEY = '_language'
class TranslatorCommentWarning ( SyntaxWarning ) :
pass
class Trans ( object ) :
def __getattr__ ( self , real_name ) :
from django . conf import settings
if settings . USE_I18N :
from django . utils . translation import trans_real as trans
else :
from django . utils . translation import trans_null as trans
setattr ( self , real_name , getattr ( trans , real_name ) )
return getattr ( trans , real_name )
_trans = Trans ( )
del Trans
def gettext_noop ( message ) :
return _trans . gettext_noop ( message )
ugettext_noop = gettext_noop
def gettext ( message ) :
return _trans . gettext ( message )
def ngettext ( singular , plural , number ) :
return _trans . ngettext ( singular , plural , number )
def ugettext ( message ) :
return _trans . ugettext ( message )
def ungettext ( singular , plural , number ) :
return _trans . ungettext ( singular , plural , number )
def pgettext ( context , message ) :
return _trans . pgettext ( context , message )
def npgettext ( context , singular , plural , number ) :
return _trans . npgettext ( context , singular , plural , number )
gettext_lazy = lazy ( gettext , str )
ugettext_lazy = lazy ( ugettext , six . text_type )
pgettext_lazy = lazy ( pgettext , six . text_type )
def lazy_number ( func , resultclass , number = None , ** kwargs ) :
if isinstance ( number , int ) :
kwargs [ 'number' ] = number
proxy = lazy ( func , resultclass ) ( ** kwargs )
else :
class NumberAwareString ( resultclass ) :
def __mod__ ( self , rhs ) :
if isinstance ( rhs , dict ) and number :
try :
number_value = rhs [ number ]
except KeyError :
raise KeyError ( 'Your dictionary lacks key \'%s\'. ' 'Please provide it, because it is required to ' 'determine whether string is singular or plural.' % number )
else :
number_value = rhs
kwargs [ 'number' ] = number_value
translated = func ( ** kwargs )
try :
translated = translated % rhs
except TypeError :
pass
return translated
proxy = lazy ( lambda ** kwargs : NumberAwareString ( ) , NumberAwareString ) ( ** kwargs )
return proxy
def ngettext_lazy ( singular , plural , number = None ) :
return lazy_number ( ngettext , str , singular = singular , plural = plural , number = number )
def ungettext_lazy ( singular , plural , number = None ) :
return lazy_number ( ungettext , six . text_type , singular = singular , plural = plural , number = number )
def npgettext_lazy ( context , singular , plural , number = None ) :
return lazy_number ( npgettext , six . text_type , context = context , singular = singular , plural = plural , number = number )
def activate ( language ) :
return _trans . activate ( language )
def deactivate ( ) :
return _trans . deactivate ( )
class override ( object ) :
def __init__ ( self , language , deactivate = False ) :
self . language = language
self . deactivate = deactivate
self . old_language = get_language ( )
def __enter__ ( self ) :
if self . language is not None :
activate ( self . language )
else :
deactivate_all ( )
def __exit__ ( self , exc_type , exc_value , traceback ) :
if self . deactivate :
deactivate ( )
else :
activate ( self . old_language )
def get_language ( ) :
return _trans . get_language ( )
def get_language_bidi ( ) :
return _trans . get_language_bidi ( )
def check_for_language ( lang_code ) :
return _trans . check_for_language ( lang_code )
def to_locale ( language ) :
return _trans . to_locale ( language )
def get_language_from_request ( request , check_path = False ) :
return _trans . get_language_from_request ( request , check_path )
def get_language_from_path ( path ) :
return _trans . get_language_from_path ( path )
def templatize ( src , origin = None ) :
return _trans . templatize ( src , origin )
def deactivate_all ( ) :
return _trans . deactivate_all ( )
def _string_concat ( * strings ) :
return '' . join ( force_text ( s ) for s in strings )
string_concat = lazy ( _string_concat , six . text_type )
def get_language_info ( lang_code ) :
from django . conf . locale import LANG_INFO
try :
return LANG_INFO [ lang_code ]
except KeyError :
if '-' not in lang_code :
raise KeyError ( "Unknown language code %s." % lang_code )
generic_lang_code = lang_code . split ( '-' ) [ 0 ]
try :
return LANG_INFO [ generic_lang_code ]
except KeyError :
raise KeyError ( "Unknown language code %s and %s." % ( lang_code , generic_lang_code ) )
trim_whitespace_re = re . compile ( '\s*\n\s*' )
def trim_whitespace ( s ) :
return trim_whitespace_re . sub ( ' ' , s . strip ( ) )
from django . conf import settings
from django . utils . encoding import force_text
from django . utils . safestring import mark_safe , SafeData
def ngettext ( singular , plural , number ) :
if number == 1 :
return singular
return plural
ngettext_lazy = ngettext
def ungettext ( singular , plural , number ) :
return force_text ( ngettext ( singular , plural , number ) )
def pgettext ( context , message ) :
return ugettext ( message )
def npgettext ( context , singular , plural , number ) :
return ungettext ( singular , plural , number )
activate = lambda x : None
deactivate = deactivate_all = lambda : None
get_language = lambda : settings . LANGUAGE_CODE
get_language_bidi = lambda : settings . LANGUAGE_CODE in settings . LANGUAGES_BIDI
check_for_language = lambda x : True
def gettext ( message ) :
if isinstance ( message , SafeData ) :
return mark_safe ( message )
return message
def ugettext ( message ) :
return force_text ( gettext ( message ) )
gettext_noop = gettext_lazy = _ = gettext
def to_locale ( language ) :
p = language . find ( '-' )
if p >= 0 :
return language [ : p ] . lower ( ) + '_' + language [ p + 1 : ] . upper ( )
else :
return language . lower ( )
def get_language_from_request ( request , check_path = False ) :
return settings . LANGUAGE_CODE
def get_language_from_path ( request ) :
return None
from __future__ import unicode_literals
from collections import OrderedDict
import os
import re
import sys
import gettext as gettext_module
from threading import local
import warnings
from django . apps import apps
from django . conf import settings
from django . core . exceptions import AppRegistryNotReady
from django . dispatch import receiver
from django . test . signals import setting_changed
from django . utils . deprecation import RemovedInDjango19Warning
from django . utils . encoding import force_text
from django . utils . _os import upath
from django . utils . safestring import mark_safe , SafeData
from django . utils import six , lru_cache
from django . utils . six import StringIO
from django . utils . translation import TranslatorCommentWarning , trim_whitespace , LANGUAGE_SESSION_KEY
_translations = { }
_active = local ( )
_default = None
_supported = None
CONTEXT_SEPARATOR = "\x04"
accept_language_re = re . compile ( r''' ([A-Za-z]{1,8}(?:-[A-Za-z0-9]{1,8})*|\*) # "en", "en-au", "x-y-z", "es-419", "*" (?:\s*;\s*q=(0(?:\.\d{,3})?|1(?:.0{,3})?))? # Optional "q=1.00", "q=0.8" (?:\s*,\s*|$) # Multiple accepts per header. ''' , re . VERBOSE )
language_code_re = re . compile ( r'^[a-z]{1,8}(?:-[a-z0-9]{1,8})*$' , re . IGNORECASE )
language_code_prefix_re = re . compile ( r'^/([\w-]+)(/|$)' )
_BROWSERS_DEPRECATED_LOCALES = { 'zh-cn' : 'zh-hans' , 'zh-tw' : 'zh-hant' , }
_DJANGO_DEPRECATED_LOCALES = _BROWSERS_DEPRECATED_LOCALES
@ receiver ( setting_changed )
def reset_cache ( ** kwargs ) :
if kwargs [ 'setting' ] in ( 'LANGUAGES' , 'LANGUAGE_CODE' ) :
global _supported
_supported = None
check_for_language . cache_clear ( )
get_supported_language_variant . cache_clear ( )
def to_locale ( language , to_lower = False ) :
p = language . find ( '-' )
if p >= 0 :
if to_lower :
return language [ : p ] . lower ( ) + '_' + language [ p + 1 : ] . lower ( )
else :
if len ( language [ p + 1 : ] ) > 2 :
return language [ : p ] . lower ( ) + '_' + language [ p + 1 ] . upper ( ) + language [ p + 2 : ] . lower ( )
return language [ : p ] . lower ( ) + '_' + language [ p + 1 : ] . upper ( )
else :
return language . lower ( )
def to_language ( locale ) :
p = locale . find ( '_' )
if p >= 0 :
return locale [ : p ] . lower ( ) + '-' + locale [ p + 1 : ] . lower ( )
else :
return locale . lower ( )
class DjangoTranslation ( gettext_module . GNUTranslations ) :
def __init__ ( self , language ) :
gettext_module . GNUTranslations . __init__ ( self )
self . __language = language
self . __to_language = to_language ( language )
self . __locale = to_locale ( language )
self . plural = lambda n : int ( n != 1 )
self . _init_translation_catalog ( )
self . _add_installed_apps_translations ( )
self . _add_local_translations ( )
self . _add_fallback ( )
def __repr__ ( self ) :
return "" % self . __language
def _new_gnu_trans ( self , localedir , use_null_fallback = True ) :
translation = gettext_module . translation ( domain = 'django' , localedir = localedir , languages = [ self . __locale ] , codeset = 'utf-8' , fallback = use_null_fallback )
if not hasattr ( translation , '_catalog' ) :
translation . _catalog = { }
translation . _info = { }
return translation
def _init_translation_catalog ( self ) :
settingsfile = upath ( sys . modules [ settings . __module__ ] . __file__ )
localedir = os . path . join ( os . path . dirname ( settingsfile ) , 'locale' )
use_null_fallback = True
if self . __language == settings . LANGUAGE_CODE :
use_null_fallback = False
translation = self . _new_gnu_trans ( localedir , use_null_fallback )
self . _info = translation . _info . copy ( )
self . _catalog = translation . _catalog . copy ( )
def _add_installed_apps_translations ( self ) :
try :
app_configs = reversed ( list ( apps . get_app_configs ( ) ) )
except AppRegistryNotReady :
raise AppRegistryNotReady ( "The translation infrastructure cannot be initialized before the " "apps registry is ready. Check that you don't make non-lazy " "gettext calls at import time." )
for app_config in app_configs :
localedir = os . path . join ( app_config . path , 'locale' )
translation = self . _new_gnu_trans ( localedir )
self . merge ( translation )
def _add_local_translations ( self ) :
for localedir in reversed ( settings . LOCALE_PATHS ) :
translation = self . _new_gnu_trans ( localedir )
self . merge ( translation )
def _add_fallback ( self ) :
if self . __language == settings . LANGUAGE_CODE or self . __language == "en-us" :
return
default_translation = translation ( settings . LANGUAGE_CODE )
self . add_fallback ( default_translation )
def merge ( self , other ) :
self . _catalog . update ( other . _catalog )
def language ( self ) :
return self . __language
def to_language ( self ) :
return self . __to_language
def translation ( language ) :
global _translations
if language not in _translations :
_translations [ language ] = DjangoTranslation ( language )
return _translations [ language ]
def activate ( language ) :
if language in _DJANGO_DEPRECATED_LOCALES :
msg = ( "The use of the language code '%s' is deprecated. " "Please use the '%s' translation instead." )
warnings . warn ( msg % ( language , _DJANGO_DEPRECATED_LOCALES [ language ] ) , RemovedInDjango19Warning , stacklevel = 2 )
_active . value = translation ( language )
def deactivate ( ) :
if hasattr ( _active , "value" ) :
del _active . value
def deactivate_all ( ) :
_active . value = gettext_module . NullTranslations ( )
def get_language ( ) :
t = getattr ( _active , "value" , None )
if t is not None :
try :
return t . to_language ( )
except AttributeError :
pass
return settings . LANGUAGE_CODE
def get_language_bidi ( ) :
base_lang = get_language ( ) . split ( '-' ) [ 0 ]
return base_lang in settings . LANGUAGES_BIDI
def catalog ( ) :
global _default
t = getattr ( _active , "value" , None )
if t is not None :
return t
if _default is None :
_default = translation ( settings . LANGUAGE_CODE )
return _default
def do_translate ( message , translation_function ) :
global _default
eol_message = message . replace ( str ( '\r\n' ) , str ( '\n' ) ) . replace ( str ( '\r' ) , str ( '\n' ) )
t = getattr ( _active , "value" , None )
if t is not None :
result = getattr ( t , translation_function ) ( eol_message )
else :
if _default is None :
_default = translation ( settings . LANGUAGE_CODE )
result = getattr ( _default , translation_function ) ( eol_message )
if isinstance ( message , SafeData ) :
return mark_safe ( result )
return result
def gettext ( message ) :
return do_translate ( message , 'gettext' )
if six . PY3 :
ugettext = gettext
else :
def ugettext ( message ) :
return do_translate ( message , 'ugettext' )
def pgettext ( context , message ) :
msg_with_ctxt = "%s%s%s" % ( context , CONTEXT_SEPARATOR , message )
result = ugettext ( msg_with_ctxt )
if CONTEXT_SEPARATOR in result :
result = force_text ( message )
return result
def gettext_noop ( message ) :
return message
def do_ntranslate ( singular , plural , number , translation_function ) :
global _default
t = getattr ( _active , "value" , None )
if t is not None :
return getattr ( t , translation_function ) ( singular , plural , number )
if _default is None :
_default = translation ( settings . LANGUAGE_CODE )
return getattr ( _default , translation_function ) ( singular , plural , number )
def ngettext ( singular , plural , number ) :
return do_ntranslate ( singular , plural , number , 'ngettext' )
if six . PY3 :
ungettext = ngettext
else :
def ungettext ( singular , plural , number ) :
return do_ntranslate ( singular , plural , number , 'ungettext' )
def npgettext ( context , singular , plural , number ) :
msgs_with_ctxt = ( "%s%s%s" % ( context , CONTEXT_SEPARATOR , singular ) , "%s%s%s" % ( context , CONTEXT_SEPARATOR , plural ) , number )
result = ungettext ( * msgs_with_ctxt )
if CONTEXT_SEPARATOR in result :
result = ungettext ( singular , plural , number )
return result
def all_locale_paths ( ) :
globalpath = os . path . join ( os . path . dirname ( upath ( sys . modules [ settings . __module__ ] . __file__ ) ) , 'locale' )
return [ globalpath ] + list ( settings . LOCALE_PATHS )
@ lru_cache . lru_cache ( maxsize = 1000 )
def check_for_language ( lang_code ) :
if not language_code_re . search ( lang_code ) :
return False
for path in all_locale_paths ( ) :
if gettext_module . find ( 'django' , path , [ to_locale ( lang_code ) ] ) is not None :
return True
return False
@ lru_cache . lru_cache ( maxsize = 1000 )
def get_supported_language_variant ( lang_code , strict = False ) :
global _supported
if _supported is None :
_supported = OrderedDict ( settings . LANGUAGES )
if lang_code :
replacement = _BROWSERS_DEPRECATED_LOCALES . get ( lang_code )
if lang_code not in _supported and replacement in _supported :
return replacement
generic_lang_code = lang_code . split ( '-' ) [ 0 ]
for code in ( lang_code , generic_lang_code ) :
if code in _supported and check_for_language ( code ) :
return code
if not strict :
for supported_code in _supported :
if supported_code . startswith ( generic_lang_code + '-' ) :
return supported_code
raise LookupError ( lang_code )
def get_language_from_path ( path , strict = False ) :
regex_match = language_code_prefix_re . match ( path )
if not regex_match :
return None
lang_code = regex_match . group ( 1 )
try :
return get_supported_language_variant ( lang_code , strict = strict )
except LookupError :
return None
def get_language_from_request ( request , check_path = False ) :
global _supported
if _supported is None :
_supported = OrderedDict ( settings . LANGUAGES )
if check_path :
lang_code = get_language_from_path ( request . path_info )
if lang_code is not None :
return lang_code
if hasattr ( request , 'session' ) :
lang_code = request . session . get ( LANGUAGE_SESSION_KEY )
if lang_code in _supported and lang_code is not None and check_for_language ( lang_code ) :
return lang_code
lang_code = request . COOKIES . get ( settings . LANGUAGE_COOKIE_NAME )
try :
return get_supported_language_variant ( lang_code )
except LookupError :
pass
accept = request . META . get ( 'HTTP_ACCEPT_LANGUAGE' , '' )
for accept_lang , unused in parse_accept_lang_header ( accept ) :
if accept_lang == '*' :
break
if not language_code_re . search ( accept_lang ) :
continue
try :
return get_supported_language_variant ( accept_lang )
except LookupError :
continue
try :
return get_supported_language_variant ( settings . LANGUAGE_CODE )
except LookupError :
return settings . LANGUAGE_CODE
dot_re = re . compile ( r'\S' )
def blankout ( src , char ) :
return dot_re . sub ( char , src )
context_re = re . compile ( r"""^\s+.*context\s+((?:"[^"]*?")|(?:'[^']*?'))\s*""" )
inline_re = re . compile ( r"""^\s*trans\s+((?:"[^"]*?")|(?:'[^']*?'))(\s+.*context\s+((?:"[^"]*?")|(?:'[^']*?')))?\s*""" )
block_re = re . compile ( r"""^\s*blocktrans(\s+.*context\s+((?:"[^"]*?")|(?:'[^']*?')))?(?:\s+|$)""" )
endblock_re = re . compile ( r"""^\s*endblocktrans$""" )
plural_re = re . compile ( r"""^\s*plural$""" )
constant_re = re . compile ( r"""_\(((?:".*?")|(?:'.*?'))\)""" )
one_percent_re = re . compile ( r"""(?= translators_comment_start :
out . write ( ' # %s' % line )
else :
out . write ( ' #\n' )
incomment = False
comment = [ ]
else :
comment . append ( t . contents )
elif intrans :
if t . token_type == TOKEN_BLOCK :
endbmatch = endblock_re . match ( t . contents )
pluralmatch = plural_re . match ( t . contents )
if endbmatch :
if inplural :
if message_context :
out . write ( ' npgettext(%r, %r, %r,count) ' % ( message_context , join_tokens ( singular , trimmed ) , join_tokens ( plural , trimmed ) ) )
else :
out . write ( ' ngettext(%r, %r, count) ' % ( join_tokens ( singular , trimmed ) , join_tokens ( plural , trimmed ) ) )
for part in singular :
out . write ( blankout ( part , 'S' ) )
for part in plural :
out . write ( blankout ( part , 'P' ) )
else :
if message_context :
out . write ( ' pgettext(%r, %r) ' % ( message_context , join_tokens ( singular , trimmed ) ) )
else :
out . write ( ' gettext(%r) ' % join_tokens ( singular , trimmed ) )
for part in singular :
out . write ( blankout ( part , 'S' ) )
message_context = None
intrans = False
inplural = False
singular = [ ]
plural = [ ]
elif pluralmatch :
inplural = True
else :
filemsg = ''
if origin :
filemsg = 'file %s, ' % origin
raise SyntaxError ( "Translation blocks must not include other block tags: %s (%sline %d)" % ( t . contents , filemsg , t . lineno ) )
elif t . token_type == TOKEN_VAR :
if inplural :
plural . append ( '%%(%s)s' % t . contents )
else :
singular . append ( '%%(%s)s' % t . contents )
elif t . token_type == TOKEN_TEXT :
contents = one_percent_re . sub ( '%%' , t . contents )
if inplural :
plural . append ( contents )
else :
singular . append ( contents )
else :
if comment_lineno_cache is not None :
cur_lineno = t . lineno + t . contents . count ( '\n' )
if comment_lineno_cache == cur_lineno :
if t . token_type != TOKEN_COMMENT :
for c in lineno_comment_map [ comment_lineno_cache ] :
filemsg = ''
if origin :
filemsg = 'file %s, ' % origin
warn_msg = ( "The translator-targeted comment '%s' " "(%sline %d) was ignored, because it wasn't the last item " "on the line." ) % ( c , filemsg , comment_lineno_cache )
warnings . warn ( warn_msg , TranslatorCommentWarning )
lineno_comment_map [ comment_lineno_cache ] = [ ]
else :
out . write ( '# %s' % ' | ' . join ( lineno_comment_map [ comment_lineno_cache ] ) )
comment_lineno_cache = None
if t . token_type == TOKEN_BLOCK :
imatch = inline_re . match ( t . contents )
bmatch = block_re . match ( t . contents )
cmatches = constant_re . findall ( t . contents )
if imatch :
g = imatch . group ( 1 )
if g [ 0 ] == '"' :
g = g . strip ( '"' )
elif g [ 0 ] == "'" :
g = g . strip ( "'" )
g = one_percent_re . sub ( '%%' , g )
if imatch . group ( 2 ) :
context_match = context_re . match ( imatch . group ( 2 ) )
message_context = context_match . group ( 1 )
if message_context [ 0 ] == '"' :
message_context = message_context . strip ( '"' )
elif message_context [ 0 ] == "'" :
message_context = message_context . strip ( "'" )
out . write ( ' pgettext(%r, %r) ' % ( message_context , g ) )
message_context = None
else :
out . write ( ' gettext(%r) ' % g )
elif bmatch :
for fmatch in constant_re . findall ( t . contents ) :
out . write ( ' _(%s) ' % fmatch )
if bmatch . group ( 1 ) :
context_match = context_re . match ( bmatch . group ( 1 ) )
message_context = context_match . group ( 1 )
if message_context [ 0 ] == '"' :
message_context = message_context . strip ( '"' )
elif message_context [ 0 ] == "'" :
message_context = message_context . strip ( "'" )
intrans = True
inplural = False
trimmed = 'trimmed' in t . split_contents ( )
singular = [ ]
plural = [ ]
elif cmatches :
for cmatch in cmatches :
out . write ( ' _(%s) ' % cmatch )
elif t . contents == 'comment' :
incomment = True
else :
out . write ( blankout ( t . contents , 'B' ) )
elif t . token_type == TOKEN_VAR :
parts = t . contents . split ( '|' )
cmatch = constant_re . match ( parts [ 0 ] )
if cmatch :
out . write ( ' _(%s) ' % cmatch . group ( 1 ) )
for p in parts [ 1 : ] :
if p . find ( ':_(' ) >= 0 :
out . write ( ' %s ' % p . split ( ':' , 1 ) [ 1 ] )
else :
out . write ( blankout ( p , 'F' ) )
elif t . token_type == TOKEN_COMMENT :
if t . contents . lstrip ( ) . startswith ( TRANSLATOR_COMMENT_MARK ) :
lineno_comment_map . setdefault ( t . lineno , [ ] ) . append ( t . contents )
comment_lineno_cache = t . lineno
else :
out . write ( blankout ( t . contents , 'X' ) )
return out . getvalue ( )
def parse_accept_lang_header ( lang_string ) :
result = [ ]
pieces = accept_language_re . split ( lang_string . lower ( ) )
if pieces [ - 1 ] :
return [ ]
for i in range ( 0 , len ( pieces ) - 1 , 3 ) :
first , lang , priority = pieces [ i : i + 3 ]
if first :
return [ ]
if priority :
try :
priority = float ( priority )
except ValueError :
return [ ]
if not priority :
priority = 1.0
result . append ( ( lang , priority ) )
result . sort ( key = lambda k : k [ 1 ] , reverse = True )
return result
import copy
class Node ( object ) :
default = 'DEFAULT'
def __init__ ( self , children = None , connector = None , negated = False ) :
self . children = children [ : ] if children else [ ]
self . connector = connector or self . default
self . negated = negated
@ classmethod
def _new_instance ( cls , children = None , connector = None , negated = False ) :
obj = Node ( children , connector , negated )
obj . __class__ = cls
return obj
def __str__ ( self ) :
if self . negated :
return '(NOT (%s: %s))' % ( self . connector , ', ' . join ( [ str ( c ) for c in self . children ] ) )
return '(%s: %s)' % ( self . connector , ', ' . join ( [ str ( c ) for c in self . children ] ) )
def __repr__ ( self ) :
return "<%s: %s>" % ( self . __class__ . __name__ , self )
def __deepcopy__ ( self , memodict ) :
obj = Node ( connector = self . connector , negated = self . negated )
obj . __class__ = self . __class__
obj . children = copy . deepcopy ( self . children , memodict )
return obj
def __len__ ( self ) :
return len ( self . children )
def __bool__ ( self ) :
return bool ( self . children )
def __nonzero__ ( self ) :
return type ( self ) . __bool__ ( self )
def __contains__ ( self , other ) :
return other in self . children
def _prepare_data ( self , data ) :
return data
def add ( self , data , conn_type , squash = True ) :
if data in self . children :
return data
data = self . _prepare_data ( data )
if not squash :
self . children . append ( data )
return data
if self . connector == conn_type :
if ( isinstance ( data , Node ) and not data . negated and ( data . connector == conn_type or len ( data ) == 1 ) ) :
self . children . extend ( data . children )
return self
else :
self . children . append ( data )
return data
else :
obj = self . _new_instance ( self . children , self . connector , self . negated )
self . connector = conn_type
self . children = [ obj , data ]
return data
def negate ( self ) :
self . negated = not self . negated
from __future__ import unicode_literals
from datetime import timedelta , tzinfo
import time
import warnings
from django . utils . deprecation import RemovedInDjango19Warning
from django . utils . encoding import force_str , force_text , DEFAULT_LOCALE_ENCODING
warnings . warn ( "django.utils.tzinfo will be removed in Django 1.9. " "Use django.utils.timezone instead." , RemovedInDjango19Warning , stacklevel = 2 )
class FixedOffset ( tzinfo ) :
def __init__ ( self , offset ) :
warnings . warn ( "django.utils.tzinfo.FixedOffset will be removed in Django 1.9. " "Use django.utils.timezone.get_fixed_timezone instead." , RemovedInDjango19Warning )
if isinstance ( offset , timedelta ) :
self . __offset = offset
offset = self . __offset . seconds // 60
else :
self . __offset = timedelta ( minutes = offset )
sign = '-' if offset < 0 else '+'
self . __name = "%s%02d%02d" % ( sign , abs ( offset ) / 60. , abs ( offset ) % 60 )
def __repr__ ( self ) :
return self . __name
def __getinitargs__ ( self ) :
return self . __offset ,
def utcoffset ( self , dt ) :
return self . __offset
def tzname ( self , dt ) :
return self . __name
def dst ( self , dt ) :
return timedelta ( 0 )
class LocalTimezone ( tzinfo ) :
def __init__ ( self , dt ) :
warnings . warn ( "django.utils.tzinfo.LocalTimezone will be removed in Django 1.9. " "Use django.utils.timezone.get_default_timezone instead." , RemovedInDjango19Warning )
tzinfo . __init__ ( self )
self . __dt = dt
self . _tzname = self . tzname ( dt )
def __repr__ ( self ) :
return force_str ( self . _tzname )
def __getinitargs__ ( self ) :
return self . __dt ,
def utcoffset ( self , dt ) :
if self . _isdst ( dt ) :
return timedelta ( seconds = - time . altzone )
else :
return timedelta ( seconds = - time . timezone )
def dst ( self , dt ) :
if self . _isdst ( dt ) :
return timedelta ( seconds = - time . altzone ) - timedelta ( seconds = - time . timezone )
else :
return timedelta ( 0 )
def tzname ( self , dt ) :
is_dst = False if dt is None else self . _isdst ( dt )
try :
return force_text ( time . tzname [ is_dst ] , DEFAULT_LOCALE_ENCODING )
except UnicodeDecodeError :
return None
def _isdst ( self , dt ) :
tt = ( dt . year , dt . month , dt . day , dt . hour , dt . minute , dt . second , dt . weekday ( ) , 0 , 0 )
try :
stamp = time . mktime ( tt )
except ( OverflowError , ValueError ) :
tt = ( 2037 , ) + tt [ 1 : ]
stamp = time . mktime ( tt )
tt = time . localtime ( stamp )
return tt . tm_isdst > 0
from __future__ import absolute_import
import warnings
from django . utils . deprecation import RemovedInDjango19Warning
warnings . warn ( "django.utils.unittest will be removed in Django 1.9." , RemovedInDjango19Warning , stacklevel = 2 )
try :
from unittest2 import *
except ImportError :
from unittest import *
from __future__ import unicode_literals
import datetime
import os
import subprocess
def get_version ( version = None ) :
version = get_complete_version ( version )
major = get_major_version ( version )
sub = ''
if version [ 3 ] == 'alpha' and version [ 4 ] == 0 :
git_changeset = get_git_changeset ( )
if git_changeset :
sub = '.dev%s' % git_changeset
elif version [ 3 ] != 'final' :
mapping = { 'alpha' : 'a' , 'beta' : 'b' , 'rc' : 'c' }
sub = mapping [ version [ 3 ] ] + str ( version [ 4 ] )
return str ( major + sub )
def get_major_version ( version = None ) :
version = get_complete_version ( version )
parts = 2 if version [ 2 ] == 0 else 3
major = '.' . join ( str ( x ) for x in version [ : parts ] )
return major
def get_complete_version ( version = None ) :
if version is None :
from django import VERSION as version
else :
assert len ( version ) == 5
assert version [ 3 ] in ( 'alpha' , 'beta' , 'rc' , 'final' )
return version
def get_git_changeset ( ) :
if hasattr ( get_git_changeset , 'cache' ) :
return get_git_changeset . cache
repo_dir = os . path . dirname ( os . path . dirname ( os . path . abspath ( __file__ ) ) )
git_log = subprocess . Popen ( 'git log --pretty=format:%ct --quiet -1 HEAD' , stdout = subprocess . PIPE , stderr = subprocess . PIPE , shell = True , cwd = repo_dir , universal_newlines = True )
timestamp = git_log . communicate ( ) [ 0 ]
try :
timestamp = datetime . datetime . utcfromtimestamp ( int ( timestamp ) )
except ValueError :
changeset = None
else :
changeset = timestamp . strftime ( '%Y%m%d%H%M%S' )
get_git_changeset . cache = changeset
return changeset
from xml . sax . saxutils import XMLGenerator
class SimplerXMLGenerator ( XMLGenerator ) :
def addQuickElement ( self , name , contents = None , attrs = None ) :
if attrs is None :
attrs = { }
self . startElement ( name , attrs )
if contents is not None :
self . characters ( contents )
self . endElement ( name )