Doppler Open edX integration

"""
"""
This is the default template for our main set of AWS servers.
This is the default template for our main set of AWS servers.


Common traits:
Common traits:
* Use memcached, and cache-backed sessions
* Use memcached, and cache-backed sessions
* Use a MySQL 5.1 database
* Use a MySQL 5.1 database
"""
"""


# We intentionally define lots of variables that aren't used, and
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# want to import all variables from base settings files
# pylint: disable=wildcard-import, unused-wildcard-import
# pylint: disable=wildcard-import, unused-wildcard-import


# Pylint gets confused by path.py instances, which report themselves as class
# Pylint gets confused by path.py instances, which report themselves as class
# objects. As a result, pylint applies the wrong regex in validating names,
# objects. As a result, pylint applies the wrong regex in validating names,
# and throws spurious errors. Therefore, we disable invalid-name checking.
# and throws spurious errors. Therefore, we disable invalid-name checking.
# pylint: disable=invalid-name
# pylint: disable=invalid-name




import codecs
import codecs
import copy
import copy
import datetime
import datetime
import os
import os
import requests


import dateutil
import dateutil
import yaml
import yaml
from corsheaders.defaults import default_headers as corsheaders_default_headers
from corsheaders.defaults import default_headers as corsheaders_default_headers
from django.core.exceptions import ImproperlyConfigured
from django.core.exceptions import ImproperlyConfigured
from edx_django_utils.plugins import add_plugins
from edx_django_utils.plugins import add_plugins
from path import Path as path
from path import Path as path


from openedx.core.djangoapps.plugins.constants import ProjectType, SettingsType
from openedx.core.djangoapps.plugins.constants import ProjectType, SettingsType
from openedx.core.lib.derived import derive_settings
from openedx.core.lib.derived import derive_settings
from openedx.core.lib.logsettings import get_logger_config
from openedx.core.lib.logsettings import get_logger_config
from xmodule.modulestore.modulestore_settings import convert_module_store_setting_if_needed
from xmodule.modulestore.modulestore_settings import convert_module_store_setting_if_needed


from .common import *
from .common import *




def get_env_setting(setting):
def get_env_setting(setting):
""" Get the environment setting or return exception """
""" Get the environment setting or return exception """
try:
try:
return os.environ[setting]
return os.environ[setting]
except KeyError:
except KeyError:
error_msg = "Set the %s env variable" % setting
error_msg = "Set the %s env variable" % setting
raise ImproperlyConfigured(error_msg) # lint-amnesty, pylint: disable=raise-missing-from
raise ImproperlyConfigured(error_msg) # lint-amnesty, pylint: disable=raise-missing-from


################################ ALWAYS THE SAME ##############################

DEBUG = False
DEFAULT_TEMPLATE_ENGINE['OPTIONS']['debug'] = False

SESSION_ENGINE = 'django.contrib.sessions.backends.cache'

# IMPORTANT: With this enabled, the server must always be behind a proxy that
# strips the header HTTP_X_FORWARDED_PROTO from client requests. Otherwise,
# a user can fool our server into thinking it was an https connection.
# See
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-proxy-ssl-header
# for other warnings.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
################################ END ALWAYS THE SAME ##############################


################################### Getting Environment Variables From Doppler ###################################
url = "https://api.doppler.com/v3/configs/config/secrets"

querystring = {"project": "openedx", "config": "dev"}
# NOTE: AUTH_TOKENS is not defined until LMS_CFG is loaded further down, so the
# Doppler service token is read from the process environment at this point.
DOPPLER_TOKEN = os.environ.get('DOPPLER_TOKEN', '')
Authorization = "Basic {DOPPLER_TOKEN}".format(DOPPLER_TOKEN=DOPPLER_TOKEN)
headers = {
    "Accept": "application/json",
    "Authorization": Authorization,
}
doppler_response = requests.request("GET", url, headers=headers, params=querystring)
EMAIL_HOST_PASSWORD = doppler_response.json()['secrets']['EMAIL_HOST_PASSWORD']['raw']
################################### END Getting Environment Variables From Doppler ###############################
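# The request above has no timeout or error handling, so a Doppler outage would
# surface as an unhandled exception at settings import time. A more defensive
# variant is sketched below as an illustration only (commented out; the Bearer
# auth scheme and the 5-second timeout are assumptions based on Doppler's
# public API documentation, not part of this deployment):
#
#     doppler_response = requests.get(
#         url,
#         headers={
#             "Accept": "application/json",
#             "Authorization": "Bearer {}".format(DOPPLER_TOKEN),
#         },
#         params=querystring,
#         timeout=5,
#     )
#     doppler_response.raise_for_status()
#     _doppler_secrets = doppler_response.json()['secrets']
#     EMAIL_HOST_PASSWORD = _doppler_secrets['EMAIL_HOST_PASSWORD']['raw']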


# A file path to a YAML file from which to load all the configuration for the edx platform
CONFIG_FILE = get_env_setting('LMS_CFG')

with codecs.open(CONFIG_FILE, encoding='utf-8') as f:
    __config__ = yaml.safe_load(f)

    # ENV_TOKENS and AUTH_TOKENS are included for reverse compatibility.
    # Removing them may break plugins that rely on them.
    ENV_TOKENS = __config__
    AUTH_TOKENS = __config__

    # Add the key/values from config into the global namespace of this module.
    # But don't override the FEATURES dict because we do that in an additive way.
    __config_copy__ = copy.deepcopy(__config__)

    KEYS_WITH_MERGED_VALUES = [
        'FEATURES',
        'TRACKING_BACKENDS',
        'EVENT_TRACKING_BACKENDS',
        'JWT_AUTH',
        'CELERY_QUEUES',
        'MKTG_URL_LINK_MAP',
        'MKTG_URL_OVERRIDES',
    ]
    for key in KEYS_WITH_MERGED_VALUES:
        if key in __config_copy__:
            del __config_copy__[key]

    vars().update(__config_copy__)
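# For illustration (hypothetical value, not from any real deployment): a key
# such as
#
#     PLATFORM_NAME: "My Open edX Site"
#
# in the LMS_CFG YAML file is now reachable both as ENV_TOKENS['PLATFORM_NAME']
# and, via the vars().update() call above, as the module-level setting
# PLATFORM_NAME, while the keys listed in KEYS_WITH_MERGED_VALUES are merged
# into their defaults further down instead of being overwritten here.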




try:
    # A file path to a YAML file from which to load all the code revisions currently deployed
    REVISION_CONFIG_FILE = get_env_setting('REVISION_CFG')

    with codecs.open(REVISION_CONFIG_FILE, encoding='utf-8') as f:
        REVISION_CONFIG = yaml.safe_load(f)
except Exception:  # pylint: disable=broad-except
    REVISION_CONFIG = {}


# Do NOT calculate this dynamically at startup with git because it's *slow*.
EDX_PLATFORM_REVISION = REVISION_CONFIG.get('EDX_PLATFORM_REVISION', EDX_PLATFORM_REVISION)

###################################### CELERY ################################

# Don't use a connection pool, since connections are dropped by ELB.
BROKER_POOL_LIMIT = 0
BROKER_CONNECTION_TIMEOUT = 1

# For the Result Store, use the django cache named 'celery'
CELERY_RESULT_BACKEND = 'django-cache'

# When the broker is behind an ELB, use a heartbeat to refresh the
# connection and to detect if it has been dropped.
BROKER_HEARTBEAT = ENV_TOKENS.get('BROKER_HEARTBEAT', 60.0)
BROKER_HEARTBEAT_CHECKRATE = ENV_TOKENS.get('BROKER_HEARTBEAT_CHECKRATE', 2)

# Each worker should only fetch one message at a time
CELERYD_PREFETCH_MULTIPLIER = 1

# STATIC_ROOT specifies the directory where static files are
# collected
STATIC_ROOT_BASE = ENV_TOKENS.get('STATIC_ROOT_BASE', None)
if STATIC_ROOT_BASE:
    STATIC_ROOT = path(STATIC_ROOT_BASE)
    WEBPACK_LOADER['DEFAULT']['STATS_FILE'] = STATIC_ROOT / "webpack-stats.json"
    WEBPACK_LOADER['WORKERS']['STATS_FILE'] = STATIC_ROOT / "webpack-worker-stats.json"


# STATIC_URL_BASE specifies the base url to use for static files
STATIC_URL_BASE = ENV_TOKENS.get('STATIC_URL_BASE', None)
if STATIC_URL_BASE:
    STATIC_URL = STATIC_URL_BASE
    if not STATIC_URL.endswith("/"):
        STATIC_URL += "/"

# Allow overriding build profile used by RequireJS with one
# contained on a custom theme
REQUIRE_BUILD_PROFILE = ENV_TOKENS.get('REQUIRE_BUILD_PROFILE', REQUIRE_BUILD_PROFILE)


# The following variables use (or) instead of the default value inside (get). This is to enforce using the Lazy Text
# values when the variable is an empty string. Therefore, setting these variables as empty text in the related
# json files will make the system read their values from the django translation files.
PLATFORM_NAME = ENV_TOKENS.get('PLATFORM_NAME') or PLATFORM_NAME
PLATFORM_DESCRIPTION = ENV_TOKENS.get('PLATFORM_DESCRIPTION') or PLATFORM_DESCRIPTION


CC_MERCHANT_NAME = ENV_TOKENS.get('CC_MERCHANT_NAME', PLATFORM_NAME)
EMAIL_FILE_PATH = ENV_TOKENS.get('EMAIL_FILE_PATH', None)
EMAIL_HOST = ENV_TOKENS.get('EMAIL_HOST', 'localhost')  # django default is localhost
EMAIL_PORT = ENV_TOKENS.get('EMAIL_PORT', 25)  # django default is 25
EMAIL_USE_TLS = ENV_TOKENS.get('EMAIL_USE_TLS', False)  # django default is False
SITE_NAME = ENV_TOKENS['SITE_NAME']
SESSION_COOKIE_DOMAIN = ENV_TOKENS.get('SESSION_COOKIE_DOMAIN')
SESSION_COOKIE_HTTPONLY = ENV_TOKENS.get('SESSION_COOKIE_HTTPONLY', True)

DCS_SESSION_COOKIE_SAMESITE = ENV_TOKENS.get('DCS_SESSION_COOKIE_SAMESITE', DCS_SESSION_COOKIE_SAMESITE)
DCS_SESSION_COOKIE_SAMESITE_FORCE_ALL = ENV_TOKENS.get('DCS_SESSION_COOKIE_SAMESITE_FORCE_ALL', DCS_SESSION_COOKIE_SAMESITE_FORCE_ALL)  # lint-amnesty, pylint: disable=line-too-long

AWS_SES_REGION_NAME = ENV_TOKENS.get('AWS_SES_REGION_NAME', 'us-east-1')
AWS_SES_REGION_ENDPOINT = ENV_TOKENS.get('AWS_SES_REGION_ENDPOINT', 'email.us-east-1.amazonaws.com')

REGISTRATION_EMAIL_PATTERNS_ALLOWED = ENV_TOKENS.get('REGISTRATION_EMAIL_PATTERNS_ALLOWED')

LMS_ROOT_URL = ENV_TOKENS.get('LMS_ROOT_URL')
LMS_INTERNAL_ROOT_URL = ENV_TOKENS.get('LMS_INTERNAL_ROOT_URL', LMS_ROOT_URL)

# List of logout URIs for each IDA that the learner should be logged out of when they logout of the LMS. Only applies to
# IDA for which the social auth flow uses DOT (Django OAuth Toolkit).
IDA_LOGOUT_URI_LIST = ENV_TOKENS.get('IDA_LOGOUT_URI_LIST', [])

ENV_FEATURES = ENV_TOKENS.get('FEATURES', {})
for feature, value in ENV_FEATURES.items():
    FEATURES[feature] = value

CMS_BASE = ENV_TOKENS.get('CMS_BASE', 'studio.edx.org')

ALLOWED_HOSTS = [
    # TODO: bbeggs remove this before prod, temp fix to get load testing running
    "*",
    ENV_TOKENS.get('LMS_BASE'),
    FEATURES['PREVIEW_LMS_BASE'],
]


# allow for environments to specify what cookie name our login subsystem should use
# this is to fix a bug regarding simultaneous logins between edx.org and edge.edx.org which can
# happen with some browsers (e.g. Firefox)
if ENV_TOKENS.get('SESSION_COOKIE_NAME', None):
    # NOTE, there's a bug in Django (http://bugs.python.org/issue18012) which necessitates this being a str()
    SESSION_COOKIE_NAME = str(ENV_TOKENS.get('SESSION_COOKIE_NAME'))

CACHES = ENV_TOKENS['CACHES']
# Cache used for location mapping -- called many times with the same key/value
# in a given request.
if 'loc_cache' not in CACHES:
    CACHES['loc_cache'] = {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': 'edx_location_mem_cache',
    }

if 'staticfiles' in CACHES:
    CACHES['staticfiles']['KEY_PREFIX'] = EDX_PLATFORM_REVISION


# In order to transition from local disk asset storage to S3 backed asset storage,
# we need to run asset collection twice, once for local disk and once for S3.
# Once we have migrated to service assets off S3, then we can convert this back to
# managed by the yaml file contents
STATICFILES_STORAGE = os.environ.get('STATICFILES_STORAGE', ENV_TOKENS.get('STATICFILES_STORAGE', STATICFILES_STORAGE))

# Load all AWS_ prefixed variables to allow an S3Boto3Storage to be configured
_locals = locals()
for key, value in ENV_TOKENS.items():
    if key.startswith('AWS_'):
        _locals[key] = value

# Currency
PAID_COURSE_REGISTRATION_CURRENCY = ENV_TOKENS.get('PAID_COURSE_REGISTRATION_CURRENCY',
                                                   PAID_COURSE_REGISTRATION_CURRENCY)


# We want Bulk Email running on the high-priority queue, so we define the
# routing key that points to it. At the moment, the name is the same.
# We have to reset the value here, since we have changed the value of the queue name.
BULK_EMAIL_ROUTING_KEY = ENV_TOKENS.get('BULK_EMAIL_ROUTING_KEY', HIGH_PRIORITY_QUEUE)

# We can run smaller jobs on the low priority queue. See note above for why
# we have to reset the value here.
BULK_EMAIL_ROUTING_KEY_SMALL_JOBS = ENV_TOKENS.get('BULK_EMAIL_ROUTING_KEY_SMALL_JOBS', DEFAULT_PRIORITY_QUEUE)

# Queue to use for expiring old entitlements
ENTITLEMENTS_EXPIRATION_ROUTING_KEY = ENV_TOKENS.get('ENTITLEMENTS_EXPIRATION_ROUTING_KEY', DEFAULT_PRIORITY_QUEUE)

# Message expiry time in seconds
CELERY_EVENT_QUEUE_TTL = ENV_TOKENS.get('CELERY_EVENT_QUEUE_TTL', None)

# Allow CELERY_QUEUES to be overwritten by ENV_TOKENS,
ENV_CELERY_QUEUES = ENV_TOKENS.get('CELERY_QUEUES', None)
if ENV_CELERY_QUEUES:
    CELERY_QUEUES = {queue: {} for queue in ENV_CELERY_QUEUES}


# Then add alternate environment queues
ALTERNATE_QUEUE_ENVS = ENV_TOKENS.get('ALTERNATE_WORKER_QUEUES', '').split()
ALTERNATE_QUEUES = [
    DEFAULT_PRIORITY_QUEUE.replace(QUEUE_VARIANT, alternate + '.')
    for alternate in ALTERNATE_QUEUE_ENVS
]
CELERY_QUEUES.update(
    {
        alternate: {}
        for alternate in ALTERNATE_QUEUES
        if alternate not in list(CELERY_QUEUES.keys())
    }
)
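# For illustration (assuming the defaults from common.py, where QUEUE_VARIANT
# is 'lms.' and DEFAULT_PRIORITY_QUEUE is 'edx.lms.core.default'): setting
# ALTERNATE_WORKER_QUEUES: "cms" in LMS_CFG would add 'edx.cms.core.default'
# to CELERY_QUEUES alongside the LMS queues, so tasks enqueued by the LMS can
# also be routed to CMS workers.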


# following setting is for backward compatibility
if ENV_TOKENS.get('COMPREHENSIVE_THEME_DIR', None):
    COMPREHENSIVE_THEME_DIR = ENV_TOKENS.get('COMPREHENSIVE_THEME_DIR')


# COMPREHENSIVE_THEME_LOCALE_PATHS contain the paths to themes locale directories e.g.
# "COMPREHENSIVE_THEME_LOCALE_PATHS" : [
#     "/edx/src/edx-themes/conf/locale"
# ],
COMPREHENSIVE_THEME_LOCALE_PATHS = ENV_TOKENS.get('COMPREHENSIVE_THEME_LOCALE_PATHS', [])


MKTG_URL_LINK_MAP.update(ENV_TOKENS.get('MKTG_URL_LINK_MAP', {}))
ENTERPRISE_MARKETING_FOOTER_QUERY_PARAMS = ENV_TOKENS.get(
    'ENTERPRISE_MARKETING_FOOTER_QUERY_PARAMS',
    ENTERPRISE_MARKETING_FOOTER_QUERY_PARAMS
)
# Marketing link overrides
MKTG_URL_OVERRIDES.update(ENV_TOKENS.get('MKTG_URL_OVERRIDES', MKTG_URL_OVERRIDES))


# Intentional defaults.
ID_VERIFICATION_SUPPORT_LINK = ENV_TOKENS.get('ID_VERIFICATION_SUPPORT_LINK', SUPPORT_SITE_LINK)
PASSWORD_RESET_SUPPORT_LINK = ENV_TOKENS.get('PASSWORD_RESET_SUPPORT_LINK', SUPPORT_SITE_LINK)
ACTIVATION_EMAIL_SUPPORT_LINK = ENV_TOKENS.get('ACTIVATION_EMAIL_SUPPORT_LINK', SUPPORT_SITE_LINK)
LOGIN_ISSUE_SUPPORT_LINK = ENV_TOKENS.get('LOGIN_ISSUE_SUPPORT_LINK', SUPPORT_SITE_LINK)

# Timezone overrides
TIME_ZONE = ENV_TOKENS.get('CELERY_TIMEZONE', CELERY_TIMEZONE)

# Translation overrides
LANGUAGE_DICT = dict(LANGUAGES)

# Additional installed apps
for app in ENV_TOKENS.get('ADDL_INSTALLED_APPS', []):
    INSTALLED_APPS.append(app)


local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL', 'INFO')
LOG_DIR = ENV_TOKENS['LOG_DIR']
DATA_DIR = path(ENV_TOKENS.get('DATA_DIR', DATA_DIR))

LOGGING = get_logger_config(LOG_DIR,
                            logging_env=ENV_TOKENS['LOGGING_ENV'],
                            local_loglevel=local_loglevel,
                            service_variant=SERVICE_VARIANT)

COURSE_LISTINGS = ENV_TOKENS.get('COURSE_LISTINGS', {})
COMMENTS_SERVICE_URL = ENV_TOKENS.get("COMMENTS_SERVICE_URL", '')
COMMENTS_SERVICE_KEY = ENV_TOKENS.get("COMMENTS_SERVICE_KEY", '')
CERT_QUEUE = ENV_TOKENS.get("CERT_QUEUE", 'test-pull')


# Python lib settings
PYTHON_LIB_FILENAME = ENV_TOKENS.get('PYTHON_LIB_FILENAME', 'python_lib.zip')

# Code jail settings
for name, value in ENV_TOKENS.get("CODE_JAIL", {}).items():
    oldvalue = CODE_JAIL.get(name)
    if isinstance(oldvalue, dict):
        for subname, subvalue in value.items():
            oldvalue[subname] = subvalue
    else:
        CODE_JAIL[name] = value
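# For illustration (hypothetical values): an LMS_CFG entry such as
#
#     CODE_JAIL:
#         limits:
#             REALTIME: 5
#
# only updates CODE_JAIL['limits']['REALTIME'] via the dict merge above, while
# a non-dict entry replaces the old value outright.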


COURSES_WITH_UNSAFE_CODE = ENV_TOKENS.get("COURSES_WITH_UNSAFE_CODE", [])

# Event Tracking
if "TRACKING_IGNORE_URL_PATTERNS" in ENV_TOKENS:
    TRACKING_IGNORE_URL_PATTERNS = ENV_TOKENS.get("TRACKING_IGNORE_URL_PATTERNS")

# SSL external authentication settings
SSL_AUTH_EMAIL_DOMAIN = ENV_TOKENS.get("SSL_AUTH_EMAIL_DOMAIN", "MIT.EDU")
SSL_AUTH_DN_FORMAT_STRING = ENV_TOKENS.get(
    "SSL_AUTH_DN_FORMAT_STRING",
    "/C=US/ST=Massachusetts/O=Massachusetts Institute of Technology/OU=Client CA v1/CN={0}/emailAddress={1}"
)

# Video Caching. Pairing country codes with CDN URLs.
# Example: {'CN': 'http://api.xuetangx.com/edx/video?s3_url='}
VIDEO_CDN_URL = ENV_TOKENS.get('VIDEO_CDN_URL', {})


# Determines whether the CSRF token can be transported on
# unencrypted channels. It is set to False here for backward compatibility,
# but it is highly recommended that this is True for environments accessed
# by end users.
CSRF_COOKIE_SECURE = ENV_TOKENS.get('CSRF_COOKIE_SECURE', False)


# Determines which origins are trusted for unsafe requests eg. POST requests.
CSRF_TRUSTED_ORIGINS = ENV_TOKENS.get('CSRF_TRUSTED_ORIGINS', [])

############# CORS headers for cross-domain requests #################

if FEATURES.get('ENABLE_CORS_HEADERS') or FEATURES.get('ENABLE_CROSS_DOMAIN_CSRF_COOKIE'):
    CORS_ALLOW_CREDENTIALS = True
    CORS_ORIGIN_WHITELIST = ENV_TOKENS.get('CORS_ORIGIN_WHITELIST', ())
    CORS_ORIGIN_ALLOW_ALL = ENV_TOKENS.get('CORS_ORIGIN_ALLOW_ALL', False)
    CORS_ALLOW_INSECURE = ENV_TOKENS.get('CORS_ALLOW_INSECURE', False)
    CORS_ALLOW_HEADERS = corsheaders_default_headers + (
        'use-jwt-cookie',
    )


# If setting a cross-domain cookie, it's really important to choose
# a name for the cookie that is DIFFERENT than the cookies used
# by each subdomain. For example, suppose the applications
# at these subdomains are configured to use the following cookie names:
#
# 1) foo.example.com --> "csrftoken"
# 2) baz.example.com --> "csrftoken"
# 3) bar.example.com --> "csrftoken"
#
# For the cross-domain version of the CSRF cookie, you need to choose
# a name DIFFERENT than "csrftoken"; otherwise, the new token configured
# for ".example.com" could conflict with the other cookies,
# non-deterministically causing 403 responses.
#
# Because of the way Django stores cookies, the cookie name MUST
# be a `str`, not unicode. Otherwise `TypeError`s will be raised
# when Django tries to call the unicode `translate()` method with the wrong
# number of parameters.
CROSS_DOMAIN_CSRF_COOKIE_NAME = str(ENV_TOKENS.get('CROSS_DOMAIN_CSRF_COOKIE_NAME'))


# When setting the domain for the "cross-domain" version of the CSRF
# cookie, you should choose something like: ".example.com"
# (note the leading dot), where both the referer and the host
# are subdomains of "example.com".
#
# Browser security rules require that
# the cookie domain matches the domain of the server; otherwise
# the cookie won't get set. And once the cookie gets set, the client
# needs to be on a domain that matches the cookie domain, otherwise
# the client won't be able to read the cookie.
CROSS_DOMAIN_CSRF_COOKIE_DOMAIN = ENV_TOKENS.get('CROSS_DOMAIN_CSRF_COOKIE_DOMAIN')


# Field overrides. To use the IDDE feature, add
# 'courseware.student_field_overrides.IndividualStudentOverrideProvider'.
FIELD_OVERRIDE_PROVIDERS = tuple(ENV_TOKENS.get('FIELD_OVERRIDE_PROVIDERS', []))


############### XBlock filesystem field config ##########
if 'DJFS' in AUTH_TOKENS and AUTH_TOKENS['DJFS'] is not None:
    DJFS = AUTH_TOKENS['DJFS']

############### Module Store Items ##########
HOSTNAME_MODULESTORE_DEFAULT_MAPPINGS = ENV_TOKENS.get('HOSTNAME_MODULESTORE_DEFAULT_MAPPINGS', {})
# PREVIEW DOMAIN must be present in HOSTNAME_MODULESTORE_DEFAULT_MAPPINGS for the preview to show draft changes
if 'PREVIEW_LMS_BASE' in FEATURES and FEATURES['PREVIEW_LMS_BASE'] != '':
    PREVIEW_DOMAIN = FEATURES['PREVIEW_LMS_BASE'].split(':')[0]
    # update dictionary with preview domain regex
    HOSTNAME_MODULESTORE_DEFAULT_MAPPINGS.update({
        PREVIEW_DOMAIN: 'draft-preferred'
    })


MODULESTORE_FIELD_OVERRIDE_PROVIDERS = ENV_TOKENS.get(
    'MODULESTORE_FIELD_OVERRIDE_PROVIDERS',
    MODULESTORE_FIELD_OVERRIDE_PROVIDERS
)

XBLOCK_FIELD_DATA_WRAPPERS = ENV_TOKENS.get(
    'XBLOCK_FIELD_DATA_WRAPPERS',
    XBLOCK_FIELD_DATA_WRAPPERS
)


############### Mixed Related(Secure/Not-Secure) Items ##########
LMS_SEGMENT_KEY = AUTH_TOKENS.get('SEGMENT_KEY')

SECRET_KEY = AUTH_TOKENS['SECRET_KEY']

AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"]
if AWS_ACCESS_KEY_ID == "":
    AWS_ACCESS_KEY_ID = None

AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"]
if AWS_SECRET_ACCESS_KEY == "":
    AWS_SECRET_ACCESS_KEY = None

AWS_STORAGE_BUCKET_NAME = AUTH_TOKENS.get('AWS_STORAGE_BUCKET_NAME', 'edxuploads')

# Disabling querystring auth instructs Boto to exclude the querystring parameters (e.g. signature, access key) it
# normally appends to every returned URL.
AWS_QUERYSTRING_AUTH = AUTH_TOKENS.get('AWS_QUERYSTRING_AUTH', True)
AWS_S3_CUSTOM_DOMAIN = AUTH_TOKENS.get('AWS_S3_CUSTOM_DOMAIN', 'edxuploads.s3.amazonaws.com')


if AUTH_TOKENS.get('DEFAULT_FILE_STORAGE'):
    DEFAULT_FILE_STORAGE = AUTH_TOKENS.get('DEFAULT_FILE_STORAGE')
elif AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
    DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
else:
    DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'

# If there is a database called 'read_replica', you can use the use_read_replica_if_available
# function in util/query.py, which is useful for very large database reads
DATABASES = AUTH_TOKENS['DATABASES']


# The normal database user does not have enough permissions to run migrations.
# Migrations are run with separate credentials, given as DB_MIGRATION_*
# environment variables
for name, database in DATABASES.items():
    if name != 'read_replica':
        database.update({
            'ENGINE': os.environ.get('DB_MIGRATION_ENGINE', database['ENGINE']),
            'USER': os.environ.get('DB_MIGRATION_USER', database['USER']),
            'PASSWORD': os.environ.get('DB_MIGRATION_PASS', database['PASSWORD']),
            'NAME': os.environ.get('DB_MIGRATION_NAME', database['NAME']),
            'HOST': os.environ.get('DB_MIGRATION_HOST', database['HOST']),
            'PORT': os.environ.get('DB_MIGRATION_PORT', database['PORT']),
        })
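# For illustration (hypothetical values): a migration run could export
#
#     DB_MIGRATION_USER=edxapp_migrate
#     DB_MIGRATION_PASS=change-me
#     DB_MIGRATION_HOST=db-primary.internal
#
# so that migrations connect with elevated credentials while normal requests
# keep using the credentials from the DATABASES entry in LMS_CFG.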


XQUEUE_INTERFACE = AUTH_TOKENS['XQUEUE_INTERFACE']

# Get the MODULESTORE from auth.json, but if it doesn't exist,
# use the one from common.py
MODULESTORE = convert_module_store_setting_if_needed(AUTH_TOKENS.get('MODULESTORE', MODULESTORE))
MONGODB_LOG = AUTH_TOKENS.get('MONGODB_LOG', {})

EMAIL_HOST_USER = AUTH_TOKENS.get('EMAIL_HOST_USER', '')  # django default is ''
# EMAIL_HOST_PASSWORD is fetched from Doppler near the top of this file, so it
# is intentionally not re-read from AUTH_TOKENS here.


############################### BLOCKSTORE #####################################
BLOCKSTORE_API_URL = ENV_TOKENS.get('BLOCKSTORE_API_URL', None)  # e.g. "https://blockstore.example.com/api/v1/"
# Configure an API auth token at (blockstore URL)/admin/authtoken/token/
BLOCKSTORE_API_AUTH_TOKEN = AUTH_TOKENS.get('BLOCKSTORE_API_AUTH_TOKEN', None)

# Datadog for events!
DATADOG = AUTH_TOKENS.get("DATADOG", {})
DATADOG.update(ENV_TOKENS.get("DATADOG", {}))

# TODO: deprecated (compatibility with previous settings)
if 'DATADOG_API' in AUTH_TOKENS:
    DATADOG['api_key'] = AUTH_TOKENS['DATADOG_API']

# Analytics API
ANALYTICS_API_KEY = AUTH_TOKENS.get("ANALYTICS_API_KEY", ANALYTICS_API_KEY)
ANALYTICS_API_URL = ENV_TOKENS.get("ANALYTICS_API_URL", ANALYTICS_API_URL)

# Zendesk
ZENDESK_USER = AUTH_TOKENS.get("ZENDESK_USER")
ZENDESK_API_KEY = AUTH_TOKENS.get("ZENDESK_API_KEY")

# API Key for inbound requests from Notifier service
EDX_API_KEY = AUTH_TOKENS.get("EDX_API_KEY")

# Celery Broker
CELERY_BROKER_TRANSPORT = ENV_TOKENS.get("CELERY_BROKER_TRANSPORT", "")
CELERY_BROKER_HOSTNAME = ENV_TOKENS.get("CELERY_BROKER_HOSTNAME", "")
CELERY_BROKER_VHOST = ENV_TOKENS.get("CELERY_BROKER_VHOST", "")
CELERY_BROKER_USER = AUTH_TOKENS.get("CELERY_BROKER_USER", "")
CELERY_BROKER_PASSWORD = AUTH_TOKENS.get("CELERY_BROKER_PASSWORD", "")


BROKER_URL = "{}://{}:{}@{}/{}".format(CELERY_BROKER_TRANSPORT,
                                       CELERY_BROKER_USER,
                                       CELERY_BROKER_PASSWORD,
                                       CELERY_BROKER_HOSTNAME,
                                       CELERY_BROKER_VHOST)
BROKER_USE_SSL = ENV_TOKENS.get('CELERY_BROKER_USE_SSL', False)
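# For illustration (hypothetical values): with CELERY_BROKER_TRANSPORT 'amqp',
# user 'celery', password 'secret', hostname 'rabbit.internal' and vhost
# 'edxapp', the format call above produces
# BROKER_URL == 'amqp://celery:secret@rabbit.internal/edxapp'.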


BROKER_TRANSPORT_OPTIONS = {
    'fanout_patterns': True,
    'fanout_prefix': True,
}

# Block Structures

# upload limits
STUDENT_FILEUPLOAD_MAX_SIZE = ENV_TOKENS.get("STUDENT_FILEUPLOAD_MAX_SIZE", STUDENT_FILEUPLOAD_MAX_SIZE)


# Event tracking
TRACKING_BACKENDS.update(AUTH_TOKENS.get("TRACKING_BACKENDS", {}))
EVENT_TRACKING_BACKENDS['tracking_logs']['OPTIONS']['backends'].update(AUTH_TOKENS.get("EVENT_TRACKING_BACKENDS", {}))
EVENT_TRACKING_BACKENDS['segmentio']['OPTIONS']['processors'][0]['OPTIONS']['whitelist'].extend(
    AUTH_TOKENS.get("EVENT_TRACKING_SEGMENTIO_EMIT_WHITELIST", []))
TRACKING_SEGMENTIO_WEBHOOK_SECRET = AUTH_TOKENS.get(
    "TRACKING_SEGMENTIO_WEBHOOK_SECRET",
    TRACKING_SEGMENTIO_WEBHOOK_SECRET
)
TRACKING_SEGMENTIO_ALLOWED_TYPES = ENV_TOKENS.get("TRACKING_SEGMENTIO_ALLOWED_TYPES", TRACKING_SEGMENTIO_ALLOWED_TYPES)
TRACKING_SEGMENTIO_DISALLOWED_SUBSTRING_NAMES = ENV_TOKENS.get(
    "TRACKING_SEGMENTIO_DISALLOWED_SUBSTRING_NAMES",
    TRACKING_SEGMENTIO_DISALLOWED_SUBSTRING_NAMES
)
TRACKING_SEGMENTIO_SOURCE_MAP = ENV_TOKENS.get("TRACKING_SEGMENTIO_SOURCE_MAP", TRACKING_SEGMENTIO_SOURCE_MAP)


# Heartbeat
HEARTBEAT_CELERY_ROUTING_KEY = ENV_TOKENS.get('HEARTBEAT_CELERY_ROUTING_KEY', HEARTBEAT_CELERY_ROUTING_KEY)

# Student identity verification settings
VERIFY_STUDENT = AUTH_TOKENS.get("VERIFY_STUDENT", VERIFY_STUDENT)
DISABLE_ACCOUNT_ACTIVATION_REQUIREMENT_SWITCH = ENV_TOKENS.get(
    "DISABLE_ACCOUNT_ACTIVATION_REQUIREMENT_SWITCH",
    DISABLE_ACCOUNT_ACTIVATION_REQUIREMENT_SWITCH
)

# Grades download
GRADES_DOWNLOAD_ROUTING_KEY = ENV_TOKENS.get('GRADES_DOWNLOAD_ROUTING_KEY', HIGH_MEM_QUEUE)

GRADES_DOWNLOAD = ENV_TOKENS.get("GRADES_DOWNLOAD", GRADES_DOWNLOAD)


# Rate limit for regrading tasks that a grading policy change can kick off

# financial reports
FINANCIAL_REPORTS = ENV_TOKENS.get("FINANCIAL_REPORTS", FINANCIAL_REPORTS)

##### ORA2 ######
# Prefix for uploads of example-based assessment AI classifiers
# This can be used to separate uploads for different environments
# within the same S3 bucket.
ORA2_FILE_PREFIX = ENV_TOKENS.get("ORA2_FILE_PREFIX", ORA2_FILE_PREFIX)


##### ACCOUNT LOCKOUT DEFAULT PARAMETERS #####
MAX_FAILED_LOGIN_ATTEMPTS_ALLOWED = ENV_TOKENS.get(
    "MAX_FAILED_LOGIN_ATTEMPTS_ALLOWED", MAX_FAILED_LOGIN_ATTEMPTS_ALLOWED
)

MAX_FAILED_LOGIN_ATTEMPTS_LOCKOUT_PERIOD_SECS = ENV_TOKENS.get(
    "MAX_FAILED_LOGIN_ATTEMPTS_LOCKOUT_PERIOD_SECS", MAX_FAILED_LOGIN_ATTEMPTS_LOCKOUT_PERIOD_SECS
)


##### LOGISTRATION RATE LIMIT SETTINGS #####
LOGISTRATION_RATELIMIT_RATE = ENV_TOKENS.get('LOGISTRATION_RATELIMIT_RATE', LOGISTRATION_RATELIMIT_RATE)
LOGISTRATION_API_RATELIMIT = ENV_TOKENS.get('LOGISTRATION_API_RATELIMIT', LOGISTRATION_API_RATELIMIT)
RESET_PASSWORD_TOKEN_VALIDATE_API_RATELIMIT = ENV_TOKENS.get(
    'RESET_PASSWORD_TOKEN_VALIDATE_API_RATELIMIT', RESET_PASSWORD_TOKEN_VALIDATE_API_RATELIMIT
)
RESET_PASSWORD_API_RATELIMIT = ENV_TOKENS.get('RESET_PASSWORD_API_RATELIMIT', RESET_PASSWORD_API_RATELIMIT)

##### REGISTRATION RATE LIMIT SETTINGS #####
REGISTRATION_VALIDATION_RATELIMIT = ENV_TOKENS.get(
    'REGISTRATION_VALIDATION_RATELIMIT', REGISTRATION_VALIDATION_RATELIMIT
)

REGISTRATION_RATELIMIT = ENV_TOKENS.get('REGISTRATION_RATELIMIT', REGISTRATION_RATELIMIT)

#### PASSWORD POLICY SETTINGS #####
AUTH_PASSWORD_VALIDATORS = ENV_TOKENS.get("AUTH_PASSWORD_VALIDATORS", AUTH_PASSWORD_VALIDATORS)