docker setup

parent fd19180e1d
commit f29003c66a
3 .env Normal file
@@ -0,0 +1,3 @@
POSTGRES_USER=michel
POSTGRES_PASSWORD=apagnan
POSTGRES_DB=django
32 docker-compose.yml Normal file
@@ -0,0 +1,32 @@
name: ft_transcendence

services:
  django:
    build: srcs/
    image: django
    networks:
      - network
    ports:
      - "8000:8000"
    container_name: django
    restart: always
    env_file: .env
    depends_on:
      - db

  db:
    image: postgres
    volumes:
      - db:/var/lib/postgresql/data
    networks:
      - network
    container_name: db
    restart: always
    env_file: .env

volumes:
  db:

networks:
  network:
    driver: bridge
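
A minimal way to exercise this compose file (a sketch, assuming the Docker Compose v2 CLI is available; build: srcs/ implies a Dockerfile at srcs/Dockerfile, which is not shown in this diff):

    docker compose up --build -d    # build the django image from srcs/ and start both services
    docker compose logs -f django   # follow the Django container's output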
4 srcs/.gitignore vendored Normal file
@@ -0,0 +1,4 @@
.env
*.pyc
db.sqlite3
**/migrations/**
247 srcs/.venv/bin/Activate.ps1 Normal file
@@ -0,0 +1,247 @@
<#
.Synopsis
Activate a Python virtual environment for the current PowerShell session.

.Description
Pushes the python executable for a virtual environment to the front of the
$Env:PATH environment variable and sets the prompt to signify that you are
in a Python virtual environment. Makes use of the command line switches as
well as the `pyvenv.cfg` file values present in the virtual environment.

.Parameter VenvDir
Path to the directory that contains the virtual environment to activate. The
default value for this is the parent of the directory that the Activate.ps1
script is located within.

.Parameter Prompt
The prompt prefix to display when this virtual environment is activated. By
default, this prompt is the name of the virtual environment folder (VenvDir)
surrounded by parentheses and followed by a single space (ie. '(.venv) ').

.Example
Activate.ps1
Activates the Python virtual environment that contains the Activate.ps1 script.

.Example
Activate.ps1 -Verbose
Activates the Python virtual environment that contains the Activate.ps1 script,
and shows extra information about the activation as it executes.

.Example
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
Activates the Python virtual environment located in the specified location.

.Example
Activate.ps1 -Prompt "MyPython"
Activates the Python virtual environment that contains the Activate.ps1 script,
and prefixes the current prompt with the specified string (surrounded in
parentheses) while the virtual environment is active.

.Notes
On Windows, it may be required to enable this Activate.ps1 script by setting the
execution policy for the user. You can do this by issuing the following PowerShell
command:

PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser

For more information on Execution Policies:
https://go.microsoft.com/fwlink/?LinkID=135170

#>
Param(
    [Parameter(Mandatory = $false)]
    [String]
    $VenvDir,
    [Parameter(Mandatory = $false)]
    [String]
    $Prompt
)

<# Function declarations --------------------------------------------------- #>

<#
.Synopsis
Remove all shell session elements added by the Activate script, including the
addition of the virtual environment's Python executable from the beginning of
the PATH variable.

.Parameter NonDestructive
If present, do not remove this function from the global namespace for the
session.

#>
function global:deactivate ([switch]$NonDestructive) {
    # Revert to original values

    # The prior prompt:
    if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
        Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
        Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
    }

    # The prior PYTHONHOME:
    if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
        Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
        Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
    }

    # The prior PATH:
    if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
        Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
        Remove-Item -Path Env:_OLD_VIRTUAL_PATH
    }

    # Just remove the VIRTUAL_ENV altogether:
    if (Test-Path -Path Env:VIRTUAL_ENV) {
        Remove-Item -Path env:VIRTUAL_ENV
    }

    # Just remove VIRTUAL_ENV_PROMPT altogether.
    if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
        Remove-Item -Path env:VIRTUAL_ENV_PROMPT
    }

    # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
    if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
        Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
    }

    # Leave deactivate function in the global namespace if requested:
    if (-not $NonDestructive) {
        Remove-Item -Path function:deactivate
    }
}

<#
.Description
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
given folder, and returns them in a map.

For each line in the pyvenv.cfg file, if that line can be parsed into exactly
two strings separated by `=` (with any amount of whitespace surrounding the =)
then it is considered a `key = value` line. The left hand string is the key,
the right hand is the value.

If the value starts with a `'` or a `"` then the first and last character is
stripped from the value before being captured.

.Parameter ConfigDir
Path to the directory that contains the `pyvenv.cfg` file.
#>
function Get-PyVenvConfig(
    [String]
    $ConfigDir
) {
    Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"

    # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
    $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue

    # An empty map will be returned if no config file is found.
    $pyvenvConfig = @{ }

    if ($pyvenvConfigPath) {

        Write-Verbose "File exists, parse `key = value` lines"
        $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath

        $pyvenvConfigContent | ForEach-Object {
            $keyval = $PSItem -split "\s*=\s*", 2
            if ($keyval[0] -and $keyval[1]) {
                $val = $keyval[1]

                # Remove extraneous quotations around a string value.
                if ("'""".Contains($val.Substring(0, 1))) {
                    $val = $val.Substring(1, $val.Length - 2)
                }

                $pyvenvConfig[$keyval[0]] = $val
                Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
            }
        }
    }
    return $pyvenvConfig
}


<# Begin Activate script --------------------------------------------------- #>

# Determine the containing directory of this script
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
$VenvExecDir = Get-Item -Path $VenvExecPath

Write-Verbose "Activation script is located in path: '$VenvExecPath'"
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"

# Set values required in priority: CmdLine, ConfigFile, Default
# First, get the location of the virtual environment, it might not be
# VenvExecDir if specified on the command line.
if ($VenvDir) {
    Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
}
else {
    Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
    $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
    Write-Verbose "VenvDir=$VenvDir"
}

# Next, read the `pyvenv.cfg` file to determine any required value such
# as `prompt`.
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir

# Next, set the prompt from the command line, or the config file, or
# just use the name of the virtual environment folder.
if ($Prompt) {
    Write-Verbose "Prompt specified as argument, using '$Prompt'"
}
else {
    Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
    if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
        Write-Verbose "  Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
        $Prompt = $pyvenvCfg['prompt'];
    }
    else {
        Write-Verbose "  Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
        Write-Verbose "  Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
        $Prompt = Split-Path -Path $venvDir -Leaf
    }
}

Write-Verbose "Prompt = '$Prompt'"
Write-Verbose "VenvDir='$VenvDir'"

# Deactivate any currently active virtual environment, but leave the
# deactivate function in place.
deactivate -nondestructive

# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
# that there is an activated venv.
$env:VIRTUAL_ENV = $VenvDir

if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {

    Write-Verbose "Setting prompt to '$Prompt'"

    # Set the prompt to include the env name
    # Make sure _OLD_VIRTUAL_PROMPT is global
    function global:_OLD_VIRTUAL_PROMPT { "" }
    Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
    New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt

    function global:prompt {
        Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
        _OLD_VIRTUAL_PROMPT
    }
    $env:VIRTUAL_ENV_PROMPT = $Prompt
}

# Clear PYTHONHOME
if (Test-Path -Path Env:PYTHONHOME) {
    Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
    Remove-Item -Path Env:PYTHONHOME
}

# Add the venv to the PATH
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
69 srcs/.venv/bin/activate Normal file
@@ -0,0 +1,69 @@
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    # reset old environment variables
    if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
        PATH="${_OLD_VIRTUAL_PATH:-}"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
        PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands. Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
        hash -r 2> /dev/null
    fi

    if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
        PS1="${_OLD_VIRTUAL_PS1:-}"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    unset VIRTUAL_ENV_PROMPT
    if [ ! "${1:-}" = "nondestructive" ] ; then
        # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="/home/adrienlsh/42/transcendence-ws/repo/srcs/.venv"
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1:-}"
    PS1="(.venv) ${PS1:-}"
    export PS1
    VIRTUAL_ENV_PROMPT="(.venv) "
    export VIRTUAL_ENV_PROMPT
fi

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
    hash -r 2> /dev/null
fi
26 srcs/.venv/bin/activate.csh Normal file
@@ -0,0 +1,26 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>

alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'

# Unset irrelevant variables.
deactivate nondestructive

setenv VIRTUAL_ENV "/home/adrienlsh/42/transcendence-ws/repo/srcs/.venv"

set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/bin:$PATH"


set _OLD_VIRTUAL_PROMPT="$prompt"

if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
    set prompt = "(.venv) $prompt"
    setenv VIRTUAL_ENV_PROMPT "(.venv) "
endif

alias pydoc python -m pydoc

rehash
69 srcs/.venv/bin/activate.fish Normal file
@@ -0,0 +1,69 @@
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
# (https://fishshell.com/); you cannot run it directly.

function deactivate -d "Exit virtual environment and return to normal shell environment"
    # reset old environment variables
    if test -n "$_OLD_VIRTUAL_PATH"
        set -gx PATH $_OLD_VIRTUAL_PATH
        set -e _OLD_VIRTUAL_PATH
    end
    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
        set -e _OLD_VIRTUAL_PYTHONHOME
    end

    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
        set -e _OLD_FISH_PROMPT_OVERRIDE
        # prevents error when using nested fish instances (Issue #93858)
        if functions -q _old_fish_prompt
            functions -e fish_prompt
            functions -c _old_fish_prompt fish_prompt
            functions -e _old_fish_prompt
        end
    end

    set -e VIRTUAL_ENV
    set -e VIRTUAL_ENV_PROMPT
    if test "$argv[1]" != "nondestructive"
        # Self-destruct!
        functions -e deactivate
    end
end

# Unset irrelevant variables.
deactivate nondestructive

set -gx VIRTUAL_ENV "/home/adrienlsh/42/transcendence-ws/repo/srcs/.venv"

set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/bin" $PATH

# Unset PYTHONHOME if set.
if set -q PYTHONHOME
    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
    set -e PYTHONHOME
end

if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
    # fish uses a function instead of an env var to generate the prompt.

    # Save the current fish_prompt function as the function _old_fish_prompt.
    functions -c fish_prompt _old_fish_prompt

    # With the original prompt function renamed, we can override with our own.
    function fish_prompt
        # Save the return status of the last command.
        set -l old_status $status

        # Output the venv prompt; color taken from the blue of the Python logo.
        printf "%s%s%s" (set_color 4B8BBE) "(.venv) " (set_color normal)

        # Restore the return status of the previous command.
        echo "exit $old_status" | .
        # Output the original/"old" prompt.
        _old_fish_prompt
    end

    set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
    set -gx VIRTUAL_ENV_PROMPT "(.venv) "
end
8 srcs/.venv/bin/django-admin Executable file
@@ -0,0 +1,8 @@
#!/home/adrienlsh/42/transcendence-ws/repo/srcs/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from django.core.management import execute_from_command_line
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(execute_from_command_line())
8 srcs/.venv/bin/pip Executable file
@@ -0,0 +1,8 @@
#!/home/adrienlsh/42/transcendence-ws/repo/srcs/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
8 srcs/.venv/bin/pip3 Executable file
@@ -0,0 +1,8 @@
#!/home/adrienlsh/42/transcendence-ws/repo/srcs/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
8 srcs/.venv/bin/pip3.11 Executable file
@@ -0,0 +1,8 @@
#!/home/adrienlsh/42/transcendence-ws/repo/srcs/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
1 srcs/.venv/bin/python Symbolic link
@@ -0,0 +1 @@
/usr/bin/python
1 srcs/.venv/bin/python3 Symbolic link
@@ -0,0 +1 @@
python
1 srcs/.venv/bin/python3.11 Symbolic link
@@ -0,0 +1 @@
python
8 srcs/.venv/bin/sqlformat Executable file
@@ -0,0 +1,8 @@
#!/home/adrienlsh/42/transcendence-ws/repo/srcs/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from sqlparse.__main__ import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
File diff suppressed because it is too large
@@ -0,0 +1 @@
pip
@@ -0,0 +1,98 @@
Metadata-Version: 2.1
Name: Django
Version: 4.2.6
Summary: A high-level Python web framework that encourages rapid development and clean, pragmatic design.
Home-page: https://www.djangoproject.com/
Author: Django Software Foundation
Author-email: foundation@djangoproject.com
License: BSD-3-Clause
Project-URL: Documentation, https://docs.djangoproject.com/
Project-URL: Release notes, https://docs.djangoproject.com/en/stable/releases/
Project-URL: Funding, https://www.djangoproject.com/fundraising/
Project-URL: Source, https://github.com/django/django
Project-URL: Tracker, https://code.djangoproject.com/
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Framework :: Django
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Topic :: Internet :: WWW/HTTP
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Requires-Python: >=3.8
License-File: LICENSE
License-File: LICENSE.python
License-File: AUTHORS
Requires-Dist: asgiref <4,>=3.6.0
Requires-Dist: sqlparse >=0.3.1
Requires-Dist: backports.zoneinfo ; python_version < "3.9"
Requires-Dist: tzdata ; sys_platform == "win32"
Provides-Extra: argon2
Requires-Dist: argon2-cffi >=19.1.0 ; extra == 'argon2'
Provides-Extra: bcrypt
Requires-Dist: bcrypt ; extra == 'bcrypt'

======
Django
======

Django is a high-level Python web framework that encourages rapid development
and clean, pragmatic design. Thanks for checking it out.

All documentation is in the "``docs``" directory and online at
https://docs.djangoproject.com/en/stable/. If you're just getting started,
here's how we recommend you read the docs:

* First, read ``docs/intro/install.txt`` for instructions on installing Django.

* Next, work through the tutorials in order (``docs/intro/tutorial01.txt``,
  ``docs/intro/tutorial02.txt``, etc.).

* If you want to set up an actual deployment server, read
  ``docs/howto/deployment/index.txt`` for instructions.

* You'll probably want to read through the topical guides (in ``docs/topics``)
  next; from there you can jump to the HOWTOs (in ``docs/howto``) for specific
  problems, and check out the reference (``docs/ref``) for gory details.

* See ``docs/README`` for instructions on building an HTML version of the docs.

Docs are updated rigorously. If you find any problems in the docs, or think
they should be clarified in any way, please take 30 seconds to fill out a
ticket here: https://code.djangoproject.com/newticket

To get more help:

* Join the ``#django`` channel on ``irc.libera.chat``. Lots of helpful people
  hang out there. See https://web.libera.chat if you're new to IRC.

* Join the django-users mailing list, or read the archives, at
  https://groups.google.com/group/django-users.

To contribute to Django:

* Check out https://docs.djangoproject.com/en/dev/internals/contributing/ for
  information about getting involved.

To run Django's test suite:

* Follow the instructions in the "Unit tests" section of
  ``docs/internals/contributing/writing-code/unit-tests.txt``, published online at
  https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/unit-tests/#running-the-unit-tests

Supporting the Development of Django
====================================

Django's development depends on your contributions.

If you depend on Django, remember to support the Django Software Foundation: https://www.djangoproject.com/fundraising/
File diff suppressed because it is too large
@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.41.2)
Root-Is-Purelib: true
Tag: py3-none-any
@@ -0,0 +1,2 @@
[console_scripts]
django-admin = django.core.management:execute_from_command_line
@@ -0,0 +1 @@
django
@@ -0,0 +1,222 @@
# don't import any costly modules
import sys
import os


is_pypy = '__pypy__' in sys.builtin_module_names


def warn_distutils_present():
    if 'distutils' not in sys.modules:
        return
    if is_pypy and sys.version_info < (3, 7):
        # PyPy for 3.6 unconditionally imports distutils, so bypass the warning
        # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
        return
    import warnings

    warnings.warn(
        "Distutils was imported before Setuptools, but importing Setuptools "
        "also replaces the `distutils` module in `sys.modules`. This may lead "
        "to undesirable behaviors or errors. To avoid these issues, avoid "
        "using distutils directly, ensure that setuptools is installed in the "
        "traditional way (e.g. not an editable install), and/or make sure "
        "that setuptools is always imported before distutils."
    )


def clear_distutils():
    if 'distutils' not in sys.modules:
        return
    import warnings

    warnings.warn("Setuptools is replacing distutils.")
    mods = [
        name
        for name in sys.modules
        if name == "distutils" or name.startswith("distutils.")
    ]
    for name in mods:
        del sys.modules[name]


def enabled():
    """
    Allow selection of distutils by environment variable.
    """
    which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')
    return which == 'local'


def ensure_local_distutils():
    import importlib

    clear_distutils()

    # With the DistutilsMetaFinder in place,
    # perform an import to cause distutils to be
    # loaded from setuptools._distutils. Ref #2906.
    with shim():
        importlib.import_module('distutils')

    # check that submodules load as expected
    core = importlib.import_module('distutils.core')
    assert '_distutils' in core.__file__, core.__file__
    assert 'setuptools._distutils.log' not in sys.modules


def do_override():
    """
    Ensure that the local copy of distutils is preferred over stdlib.

    See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
    for more motivation.
    """
    if enabled():
        warn_distutils_present()
        ensure_local_distutils()


class _TrivialRe:
    def __init__(self, *patterns):
        self._patterns = patterns

    def match(self, string):
        return all(pat in string for pat in self._patterns)


class DistutilsMetaFinder:
    def find_spec(self, fullname, path, target=None):
        # optimization: only consider top level modules and those
        # found in the CPython test suite.
        if path is not None and not fullname.startswith('test.'):
            return

        method_name = 'spec_for_{fullname}'.format(**locals())
        method = getattr(self, method_name, lambda: None)
        return method()

    def spec_for_distutils(self):
        if self.is_cpython():
            return

        import importlib
        import importlib.abc
        import importlib.util

        try:
            mod = importlib.import_module('setuptools._distutils')
        except Exception:
            # There are a couple of cases where setuptools._distutils
            # may not be present:
            # - An older Setuptools without a local distutils is
            #   taking precedence. Ref #2957.
            # - Path manipulation during sitecustomize removes
            #   setuptools from the path but only after the hook
            #   has been loaded. Ref #2980.
            # In either case, fall back to stdlib behavior.
            return

        class DistutilsLoader(importlib.abc.Loader):
            def create_module(self, spec):
                mod.__name__ = 'distutils'
                return mod

            def exec_module(self, module):
                pass

        return importlib.util.spec_from_loader(
            'distutils', DistutilsLoader(), origin=mod.__file__
        )

    @staticmethod
    def is_cpython():
        """
        Suppress supplying distutils for CPython (build and tests).
        Ref #2965 and #3007.
        """
        return os.path.isfile('pybuilddir.txt')

    def spec_for_pip(self):
        """
        Ensure stdlib distutils when running under pip.
        See pypa/pip#8761 for rationale.
        """
        if self.pip_imported_during_build():
            return
        clear_distutils()
        self.spec_for_distutils = lambda: None

    @classmethod
    def pip_imported_during_build(cls):
        """
        Detect if pip is being imported in a build script. Ref #2355.
        """
        import traceback

        return any(
            cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None)
        )

    @staticmethod
    def frame_file_is_setup(frame):
        """
        Return True if the indicated frame suggests a setup.py file.
        """
        # some frames may not have __file__ (#2940)
        return frame.f_globals.get('__file__', '').endswith('setup.py')

    def spec_for_sensitive_tests(self):
        """
        Ensure stdlib distutils when running select tests under CPython.

        python/cpython#91169
        """
        clear_distutils()
        self.spec_for_distutils = lambda: None

    sensitive_tests = (
        [
            'test.test_distutils',
            'test.test_peg_generator',
            'test.test_importlib',
        ]
        if sys.version_info < (3, 10)
        else [
            'test.test_distutils',
        ]
    )


for name in DistutilsMetaFinder.sensitive_tests:
    setattr(
        DistutilsMetaFinder,
        f'spec_for_{name}',
        DistutilsMetaFinder.spec_for_sensitive_tests,
    )


DISTUTILS_FINDER = DistutilsMetaFinder()


def add_shim():
    DISTUTILS_FINDER in sys.meta_path or insert_shim()


class shim:
    def __enter__(self):
        insert_shim()

    def __exit__(self, exc, value, tb):
        remove_shim()


def insert_shim():
    sys.meta_path.insert(0, DISTUTILS_FINDER)


def remove_shim():
    try:
        sys.meta_path.remove(DISTUTILS_FINDER)
    except ValueError:
        pass
@@ -0,0 +1 @@
__import__('_distutils_hack').do_override()
@@ -0,0 +1 @@
pip
@ -0,0 +1,246 @@
Metadata-Version: 2.1
Name: asgiref
Version: 3.7.2
Summary: ASGI specs, helper code, and adapters
Home-page: https://github.com/django/asgiref/
Author: Django Software Foundation
Author-email: foundation@djangoproject.com
License: BSD-3-Clause
Project-URL: Documentation, https://asgi.readthedocs.io/
Project-URL: Further Documentation, https://docs.djangoproject.com/en/stable/topics/async/#async-adapter-functions
Project-URL: Changelog, https://github.com/django/asgiref/blob/master/CHANGELOG.txt
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Topic :: Internet :: WWW/HTTP
Requires-Python: >=3.7
License-File: LICENSE
Requires-Dist: typing-extensions (>=4) ; python_version < "3.11"
Provides-Extra: tests
Requires-Dist: pytest ; extra == 'tests'
Requires-Dist: pytest-asyncio ; extra == 'tests'
Requires-Dist: mypy (>=0.800) ; extra == 'tests'

asgiref
=======

.. image:: https://api.travis-ci.org/django/asgiref.svg
    :target: https://travis-ci.org/django/asgiref

.. image:: https://img.shields.io/pypi/v/asgiref.svg
    :target: https://pypi.python.org/pypi/asgiref

ASGI is a standard for Python asynchronous web apps and servers to communicate
with each other, and positioned as an asynchronous successor to WSGI. You can
read more at https://asgi.readthedocs.io/en/latest/

This package includes ASGI base libraries, such as:

* Sync-to-async and async-to-sync function wrappers, ``asgiref.sync``
* Server base classes, ``asgiref.server``
* A WSGI-to-ASGI adapter, in ``asgiref.wsgi``


Function wrappers
-----------------

These allow you to wrap or decorate async or sync functions to call them from
the other style (so you can call async functions from a synchronous thread,
or vice-versa).

In particular:

* AsyncToSync lets a synchronous subthread stop and wait while the async
  function is called on the main thread's event loop, and then control is
  returned to the thread when the async function is finished.

* SyncToAsync lets async code call a synchronous function, which is run in
  a threadpool and control returned to the async coroutine when the synchronous
  function completes.

The idea is to make it easier to call synchronous APIs from async code and
asynchronous APIs from synchronous code so it's easier to transition code from
one style to the other. In the case of Channels, we wrap the (synchronous)
Django view system with SyncToAsync to allow it to run inside the (asynchronous)
ASGI server.

Note that exactly what threads things run in is very specific, and aimed to
keep maximum compatibility with old synchronous code. See
"Synchronous code & Threads" below for a full explanation. By default,
``sync_to_async`` will run all synchronous code in the program in the same
thread for safety reasons; you can disable this for more performance with
``@sync_to_async(thread_sensitive=False)``, but make sure that your code does
not rely on anything bound to threads (like database connections) when you do.
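
As a very rough usage sketch (``fetch_from_db`` and ``get_data`` here are
made-up stand-ins, not part of the library), wrapping in both directions
might look like this::

    from asgiref.sync import async_to_sync, sync_to_async

    def fetch_from_db(key):
        # Stand-in for a blocking call (for example, an ORM query).
        return {"key": key}

    async def get_data(key):
        # Call the blocking helper without blocking the event loop.
        return await sync_to_async(fetch_from_db)(key)

    def sync_entry_point(key):
        # Call the coroutine from plain synchronous code.
        return async_to_sync(get_data)(key)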


Threadlocal replacement
-----------------------

This is a drop-in replacement for ``threading.local`` that works with both
threads and asyncio Tasks. Even better, it will proxy values through from a
task-local context to a thread-local context when you use ``sync_to_async``
to run things in a threadpool, and vice-versa for ``async_to_sync``.

If you instead want true thread- and task-safety, you can set
``thread_critical`` on the Local object to ensure this instead.
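
A minimal sketch of the drop-in usage (the attribute and variable names are
arbitrary)::

    from asgiref.local import Local

    request_state = Local()        # used like threading.local()
    request_state.user_id = 42     # set in a coroutine or thread...

    def read_user_id():
        # ...and still visible here when this runs via sync_to_async
        # from that same coroutine.
        return getattr(request_state, "user_id", None)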


Server base classes
-------------------

Includes a ``StatelessServer`` class which provides all the hard work of
writing a stateless server (as in, does not handle direct incoming sockets
but instead consumes external streams or sockets to work out what is happening).

An example of such a server would be a chatbot server that connects out to
a central chat server and provides a "connection scope" per user chatting to
it. There's only one actual connection, but the server has to separate things
into several scopes for easier writing of the code.

You can see an example of this being used in `frequensgi <https://github.com/andrewgodwin/frequensgi>`_.
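
As a rough sketch of the shape such a server takes (``external_events`` is an
assumed helper standing in for whatever upstream connection you consume)::

    from asgiref.server import StatelessServer

    class ChatRelayServer(StatelessServer):
        async def handle(self):
            # Route each external event into a per-user application
            # instance, keyed by a stable scope_id.
            async for user_id, event in self.external_events():  # hypothetical source
                queue = self.get_or_create_application_instance(
                    "user-%s" % user_id,
                    {"type": "chat", "user_id": user_id},
                )
                queue.put_nowait(event)

        async def application_send(self, scope, message):
            # Deliver messages the applications send back to the upstream service.
            ...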


WSGI-to-ASGI adapter
--------------------

Allows you to wrap a WSGI application so it appears as a valid ASGI application.

Simply wrap it around your WSGI application like so::

    asgi_application = WsgiToAsgi(wsgi_application)

The WSGI application will be run in a synchronous threadpool, and the wrapped
ASGI application will be one that accepts ``http`` class messages.

Please note that not all extended features of WSGI may be supported (such as
file handles for incoming POST bodies).
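
For instance, with a minimal WSGI callable (the application itself is just a
stand-in)::

    from asgiref.wsgi import WsgiToAsgi

    def wsgi_application(environ, start_response):
        start_response("200 OK", [("Content-Type", "text/plain")])
        return [b"Hello from WSGI"]

    # The wrapped object is a normal single-callable ASGI application.
    asgi_application = WsgiToAsgi(wsgi_application)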


Dependencies
------------

``asgiref`` requires Python 3.7 or higher.


Contributing
------------

Please refer to the
`main Channels contributing docs <https://github.com/django/channels/blob/master/CONTRIBUTING.rst>`_.


Testing
'''''''

To run tests, make sure you have installed the ``tests`` extra with the package::

    cd asgiref/
    pip install -e .[tests]
    pytest


Building the documentation
''''''''''''''''''''''''''

The documentation uses `Sphinx <http://www.sphinx-doc.org>`_::

    cd asgiref/docs/
    pip install sphinx

To build the docs, you can use the default tools::

    sphinx-build -b html . _build/html  # or `make html`, if you've got make set up
    cd _build/html
    python -m http.server

...or you can use ``sphinx-autobuild`` to run a server and rebuild/reload
your documentation changes automatically::

    pip install sphinx-autobuild
    sphinx-autobuild . _build/html


Releasing
'''''''''

To release, first add details to CHANGELOG.txt and update the version number in ``asgiref/__init__.py``.

Then, build and push the packages::

    python -m build
    twine upload dist/*
    rm -r build/ dist/


Implementation Details
----------------------

Synchronous code & threads
''''''''''''''''''''''''''

The ``asgiref.sync`` module provides two wrappers that let you go between
asynchronous and synchronous code at will, while taking care of the rough edges
for you.

Unfortunately, the rough edges are numerous, and the code has to work especially
hard to keep things in the same thread as much as possible. Notably, the
restrictions we are working with are:

* All synchronous code called through ``SyncToAsync`` and marked with
  ``thread_sensitive`` should run in the same thread as each other (and if the
  outer layer of the program is synchronous, the main thread)

* If a thread already has a running async loop, ``AsyncToSync`` can't run things
  on that loop if it's blocked on synchronous code that is above you in the
  call stack.

The first compromise you get to might be that ``thread_sensitive`` code should
just run in the same thread and not spawn in a sub-thread, fulfilling the first
restriction, but that immediately runs you into the second restriction.

The only real solution is to essentially have a variant of ThreadPoolExecutor
that executes any ``thread_sensitive`` code on the outermost synchronous
thread - either the main thread, or a single spawned subthread.

This means you now have two basic states:

* If the outermost layer of your program is synchronous, then all async code
  run through ``AsyncToSync`` will run in a per-call event loop in arbitrary
  sub-threads, while all ``thread_sensitive`` code will run in the main thread.

* If the outermost layer of your program is asynchronous, then all async code
  runs on the main thread's event loop, and all ``thread_sensitive`` synchronous
  code will run in a single shared sub-thread.

Crucially, this means that in both cases there is a thread which is a shared
resource that all ``thread_sensitive`` code must run on, and there is a chance
that this thread is currently blocked on its own ``AsyncToSync`` call. Thus,
``AsyncToSync`` needs to act as an executor for thread code while it's blocking.

The ``CurrentThreadExecutor`` class provides this functionality; rather than
simply waiting on a Future, you can call its ``run_until_future`` method and
it will run submitted code until that Future is done. This means that code
inside the call can then run code on your thread.


Maintenance and Security
------------------------

To report security issues, please contact security@djangoproject.com. For GPG
signatures and more security process information, see
https://docs.djangoproject.com/en/dev/internals/security/.

To report bugs or request new features, please open a new GitHub issue.

This repository is part of the Channels project. For the shepherd and maintenance team, please see the
`main Channels readme <https://github.com/django/channels/blob/master/README.rst>`_.
@ -0,0 +1,28 @@
asgiref-3.7.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
asgiref-3.7.2.dist-info/LICENSE,sha256=uEZBXRtRTpwd_xSiLeuQbXlLxUbKYSn5UKGM0JHipmk,1552
asgiref-3.7.2.dist-info/METADATA,sha256=vMxxYC76PlmOumc-o6BcGHukarKP7PciLr3rHUCvdHc,9210
asgiref-3.7.2.dist-info/RECORD,,
asgiref-3.7.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
asgiref-3.7.2.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
asgiref-3.7.2.dist-info/top_level.txt,sha256=bokQjCzwwERhdBiPdvYEZa4cHxT4NCeAffQNUqJ8ssg,8
asgiref/__init__.py,sha256=VgRDlnqo96_fPqplgsON3OCRDVG8t54TJEU6yH041P0,22
asgiref/__pycache__/__init__.cpython-311.pyc,,
asgiref/__pycache__/compatibility.cpython-311.pyc,,
asgiref/__pycache__/current_thread_executor.cpython-311.pyc,,
asgiref/__pycache__/local.cpython-311.pyc,,
asgiref/__pycache__/server.cpython-311.pyc,,
asgiref/__pycache__/sync.cpython-311.pyc,,
asgiref/__pycache__/testing.cpython-311.pyc,,
asgiref/__pycache__/timeout.cpython-311.pyc,,
asgiref/__pycache__/typing.cpython-311.pyc,,
asgiref/__pycache__/wsgi.cpython-311.pyc,,
asgiref/compatibility.py,sha256=DhY1SOpOvOw0Y1lSEjCqg-znRUQKecG3LTaV48MZi68,1606
asgiref/current_thread_executor.py,sha256=lqKd8ge2Xk0Tr-JL4bic4CN8S3O1oj6wD4Or-emxipQ,3985
asgiref/local.py,sha256=nx5RqVFLYgUJVaxzApuQUW7dd9y21sruMYdgISoRs1k,4854
asgiref/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
asgiref/server.py,sha256=egTQhZo1k4G0F7SSBQNp_VOekpGcjBJZU2kkCoiGC_M,6005
asgiref/sync.py,sha256=XNqEZqOt7k1zqWPYtvyNbEXv4idWd6Rbucs2DT9mZT0,22941
asgiref/testing.py,sha256=3byNRV7Oto_Fg8Z-fErQJ3yGf7OQlcUexbN_cDQugzQ,3119
asgiref/timeout.py,sha256=LtGL-xQpG8JHprdsEUCMErJ0kNWj4qwWZhEHJ3iKu4s,3627
asgiref/typing.py,sha256=IAaNg5qosjCUcO_O0thIhDiPaSmtwVBvb7rB4FfRUwc,6238
asgiref/wsgi.py,sha256=-L0eo_uK_dq7EPjv1meW1BRGytURaO9NPESxnJc9CtA,6575
@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.40.0)
Root-Is-Purelib: true
Tag: py3-none-any

@ -0,0 +1 @@
asgiref
@ -0,0 +1 @@
__version__ = "3.7.2"
@ -0,0 +1,48 @@
import inspect

from .sync import iscoroutinefunction


def is_double_callable(application):
    """
    Tests to see if an application is a legacy-style (double-callable) application.
    """
    # Look for a hint on the object first
    if getattr(application, "_asgi_single_callable", False):
        return False
    if getattr(application, "_asgi_double_callable", False):
        return True
    # Uninstanted classes are double-callable
    if inspect.isclass(application):
        return True
    # Instanted classes depend on their __call__
    if hasattr(application, "__call__"):
        # We only check to see if its __call__ is a coroutine function -
        # if it's not, it still might be a coroutine function itself.
        if iscoroutinefunction(application.__call__):
            return False
    # Non-classes we just check directly
    return not iscoroutinefunction(application)


def double_to_single_callable(application):
    """
    Transforms a double-callable ASGI application into a single-callable one.
    """

    async def new_application(scope, receive, send):
        instance = application(scope)
        return await instance(receive, send)

    return new_application


def guarantee_single_callable(application):
    """
    Takes either a single- or double-callable application and always returns it
    in single-callable style. Use this to add backwards compatibility for ASGI
    2.0 applications to your server/test harness/etc.
    """
    if is_double_callable(application):
        application = double_to_single_callable(application)
    return application
@ -0,0 +1,115 @@
import queue
import sys
import threading
from concurrent.futures import Executor, Future
from typing import TYPE_CHECKING, Any, Callable, TypeVar, Union

if sys.version_info >= (3, 10):
    from typing import ParamSpec
else:
    from typing_extensions import ParamSpec

_T = TypeVar("_T")
_P = ParamSpec("_P")
_R = TypeVar("_R")


class _WorkItem:
    """
    Represents an item needing to be run in the executor.
    Copied from ThreadPoolExecutor (but it's private, so we're not going to rely on importing it)
    """

    def __init__(
        self,
        future: "Future[_R]",
        fn: Callable[_P, _R],
        *args: _P.args,
        **kwargs: _P.kwargs,
    ):
        self.future = future
        self.fn = fn
        self.args = args
        self.kwargs = kwargs

    def run(self) -> None:
        __traceback_hide__ = True  # noqa: F841
        if not self.future.set_running_or_notify_cancel():
            return
        try:
            result = self.fn(*self.args, **self.kwargs)
        except BaseException as exc:
            self.future.set_exception(exc)
            # Break a reference cycle with the exception 'exc'
            self = None  # type: ignore[assignment]
        else:
            self.future.set_result(result)


class CurrentThreadExecutor(Executor):
    """
    An Executor that actually runs code in the thread it is instantiated in.
    Passed to other threads running async code, so they can run sync code in
    the thread they came from.
    """

    def __init__(self) -> None:
        self._work_thread = threading.current_thread()
        self._work_queue: queue.Queue[Union[_WorkItem, "Future[Any]"]] = queue.Queue()
        self._broken = False

    def run_until_future(self, future: "Future[Any]") -> None:
        """
        Runs the code in the work queue until a result is available from the future.
        Should be run from the thread the executor is initialised in.
        """
        # Check we're in the right thread
        if threading.current_thread() != self._work_thread:
            raise RuntimeError(
                "You cannot run CurrentThreadExecutor from a different thread"
            )
        future.add_done_callback(self._work_queue.put)
        # Keep getting and running work items until we get the future we're waiting for
        # back via the future's done callback.
        try:
            while True:
                # Get a work item and run it
                work_item = self._work_queue.get()
                if work_item is future:
                    return
                assert isinstance(work_item, _WorkItem)
                work_item.run()
                del work_item
        finally:
            self._broken = True

    def _submit(
        self,
        fn: Callable[_P, _R],
        *args: _P.args,
        **kwargs: _P.kwargs,
    ) -> "Future[_R]":
        # Check they're not submitting from the same thread
        if threading.current_thread() == self._work_thread:
            raise RuntimeError(
                "You cannot submit onto CurrentThreadExecutor from its own thread"
            )
        # Check they're not too late or the executor errored
        if self._broken:
            raise RuntimeError("CurrentThreadExecutor already quit or is broken")
        # Add to work queue
        f: "Future[_R]" = Future()
        work_item = _WorkItem(f, fn, *args, **kwargs)
        self._work_queue.put(work_item)
        # Return the future
        return f

    # Python 3.9+ has a new signature for submit with a "/" after `fn`, to enforce
    # it to be a positional argument. If we ignore[override] mypy on 3.9+ will be
    # happy but 3.7/3.8 will say that the ignore comment is unused, even when
    # defining them differently based on sys.version_info.
    # We should be able to remove this when we drop support for 3.7/3.8.
    if not TYPE_CHECKING:

        def submit(self, fn, *args, **kwargs):
            return self._submit(fn, *args, **kwargs)
120
srcs/.venv/lib/python3.11/site-packages/asgiref/local.py
Normal file
@ -0,0 +1,120 @@
|
|||||||
|
import random
|
||||||
|
import string
|
||||||
|
import sys
|
||||||
|
import threading
|
||||||
|
import weakref
|
||||||
|
|
||||||
|
|
||||||
|
class Local:
|
||||||
|
"""
|
||||||
|
A drop-in replacement for threading.locals that also works with asyncio
|
||||||
|
Tasks (via the current_task asyncio method), and passes locals through
|
||||||
|
sync_to_async and async_to_sync.
|
||||||
|
|
||||||
|
Specifically:
|
||||||
|
- Locals work per-coroutine on any thread not spawned using asgiref
|
||||||
|
- Locals work per-thread on any thread not spawned using asgiref
|
||||||
|
- Locals are shared with the parent coroutine when using sync_to_async
|
||||||
|
- Locals are shared with the parent thread when using async_to_sync
|
||||||
|
(and if that thread was launched using sync_to_async, with its parent
|
||||||
|
coroutine as well, with this working for indefinite levels of nesting)
|
||||||
|
|
||||||
|
Set thread_critical to True to not allow locals to pass from an async Task
|
||||||
|
to a thread it spawns. This is needed for code that truly needs
|
||||||
|
thread-safety, as opposed to things used for helpful context (e.g. sqlite
|
||||||
|
does not like being called from a different thread to the one it is from).
|
||||||
|
Thread-critical code will still be differentiated per-Task within a thread
|
||||||
|
as it is expected it does not like concurrent access.
|
||||||
|
|
||||||
|
This doesn't use contextvars as it needs to support 3.6. Once it can support
|
||||||
|
3.7 only, we can then reimplement the storage more nicely.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, thread_critical: bool = False) -> None:
|
||||||
|
self._thread_critical = thread_critical
|
||||||
|
self._thread_lock = threading.RLock()
|
||||||
|
self._context_refs: "weakref.WeakSet[object]" = weakref.WeakSet()
|
||||||
|
# Random suffixes stop accidental reuse between different Locals,
|
||||||
|
# though we try to force deletion as well.
|
||||||
|
self._attr_name = "_asgiref_local_impl_{}_{}".format(
|
||||||
|
id(self),
|
||||||
|
"".join(random.choice(string.ascii_letters) for i in range(8)),
|
||||||
|
)
|
||||||
|
|
||||||
|
def _get_context_id(self):
|
||||||
|
"""
|
||||||
|
Get the ID we should use for looking up variables
|
||||||
|
"""
|
||||||
|
# Prevent a circular reference
|
||||||
|
from .sync import AsyncToSync, SyncToAsync
|
||||||
|
|
||||||
|
# First, pull the current task if we can
|
||||||
|
context_id = SyncToAsync.get_current_task()
|
||||||
|
context_is_async = True
|
||||||
|
# OK, let's try for a thread ID
|
||||||
|
if context_id is None:
|
||||||
|
context_id = threading.current_thread()
|
||||||
|
context_is_async = False
|
||||||
|
# If we're thread-critical, we stop here, as we can't share contexts.
|
||||||
|
if self._thread_critical:
|
||||||
|
return context_id
|
||||||
|
# Now, take those and see if we can resolve them through the launch maps
|
||||||
|
for i in range(sys.getrecursionlimit()):
|
||||||
|
try:
|
||||||
|
if context_is_async:
|
||||||
|
# Tasks have a source thread in AsyncToSync
|
||||||
|
context_id = AsyncToSync.launch_map[context_id]
|
||||||
|
context_is_async = False
|
||||||
|
else:
|
||||||
|
# Threads have a source task in SyncToAsync
|
||||||
|
context_id = SyncToAsync.launch_map[context_id]
|
||||||
|
context_is_async = True
|
||||||
|
except KeyError:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
# Catch infinite loops (they happen if you are screwing around
|
||||||
|
# with AsyncToSync implementations)
|
||||||
|
raise RuntimeError("Infinite launch_map loops")
|
||||||
|
return context_id
|
||||||
|
|
||||||
|
def _get_storage(self):
|
||||||
|
context_obj = self._get_context_id()
|
||||||
|
if not hasattr(context_obj, self._attr_name):
|
||||||
|
setattr(context_obj, self._attr_name, {})
|
||||||
|
self._context_refs.add(context_obj)
|
||||||
|
return getattr(context_obj, self._attr_name)
|
||||||
|
|
||||||
|
def __del__(self):
|
||||||
|
try:
|
||||||
|
for context_obj in self._context_refs:
|
||||||
|
try:
|
||||||
|
delattr(context_obj, self._attr_name)
|
||||||
|
except AttributeError:
|
||||||
|
pass
|
||||||
|
except TypeError:
|
||||||
|
# WeakSet.__iter__ can crash when interpreter is shutting down due
|
||||||
|
# to _IterationGuard being None.
|
||||||
|
pass
|
||||||
|
|
||||||
|
def __getattr__(self, key):
|
||||||
|
with self._thread_lock:
|
||||||
|
storage = self._get_storage()
|
||||||
|
if key in storage:
|
||||||
|
return storage[key]
|
||||||
|
else:
|
||||||
|
raise AttributeError(f"{self!r} object has no attribute {key!r}")
|
||||||
|
|
||||||
|
def __setattr__(self, key, value):
|
||||||
|
if key in ("_context_refs", "_thread_critical", "_thread_lock", "_attr_name"):
|
||||||
|
return super().__setattr__(key, value)
|
||||||
|
with self._thread_lock:
|
||||||
|
storage = self._get_storage()
|
||||||
|
storage[key] = value
|
||||||
|
|
||||||
|
def __delattr__(self, key):
|
||||||
|
with self._thread_lock:
|
||||||
|
storage = self._get_storage()
|
||||||
|
if key in storage:
|
||||||
|
del storage[key]
|
||||||
|
else:
|
||||||
|
raise AttributeError(f"{self!r} object has no attribute {key!r}")
|
157
srcs/.venv/lib/python3.11/site-packages/asgiref/server.py
Normal file
@ -0,0 +1,157 @@
|
|||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
from .compatibility import guarantee_single_callable
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class StatelessServer:
|
||||||
|
"""
|
||||||
|
Base server class that handles basic concepts like application instance
|
||||||
|
creation/pooling, exception handling, and similar, for stateless protocols
|
||||||
|
(i.e. ones without actual incoming connections to the process)
|
||||||
|
|
||||||
|
Your code should override the handle() method, doing whatever it needs to,
|
||||||
|
and calling get_or_create_application_instance with a unique `scope_id`
|
||||||
|
and `scope` for the scope it wants to get.
|
||||||
|
|
||||||
|
If an application instance is found with the same `scope_id`, you are
|
||||||
|
given its input queue, otherwise one is made for you with the scope provided
|
||||||
|
and you are given that fresh new input queue. Either way, you should do
|
||||||
|
something like:
|
||||||
|
|
||||||
|
input_queue = self.get_or_create_application_instance(
|
||||||
|
"user-123456",
|
||||||
|
{"type": "testprotocol", "user_id": "123456", "username": "andrew"},
|
||||||
|
)
|
||||||
|
input_queue.put_nowait(message)
|
||||||
|
|
||||||
|
If you try and create an application instance and there are already
|
||||||
|
`max_application` instances, the oldest/least recently used one will be
|
||||||
|
reclaimed and shut down to make space.
|
||||||
|
|
||||||
|
Application coroutines that error will be found periodically (every 100ms
|
||||||
|
by default) and have their exceptions printed to the console. Override
|
||||||
|
application_exception() if you want to do more when this happens.
|
||||||
|
|
||||||
|
If you override run(), make sure you handle things like launching the
|
||||||
|
application checker.
|
||||||
|
"""
|
||||||
|
|
||||||
|
application_checker_interval = 0.1
|
||||||
|
|
||||||
|
def __init__(self, application, max_applications=1000):
|
||||||
|
# Parameters
|
||||||
|
self.application = application
|
||||||
|
self.max_applications = max_applications
|
||||||
|
# Initialisation
|
||||||
|
self.application_instances = {}
|
||||||
|
|
||||||
|
### Mainloop and handling
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
"""
|
||||||
|
Runs the asyncio event loop with our handler loop.
|
||||||
|
"""
|
||||||
|
event_loop = asyncio.get_event_loop()
|
||||||
|
asyncio.ensure_future(self.application_checker())
|
||||||
|
try:
|
||||||
|
event_loop.run_until_complete(self.handle())
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
logger.info("Exiting due to Ctrl-C/interrupt")
|
||||||
|
|
||||||
|
async def handle(self):
|
||||||
|
raise NotImplementedError("You must implement handle()")
|
||||||
|
|
||||||
|
async def application_send(self, scope, message):
|
||||||
|
"""
|
||||||
|
Receives outbound sends from applications and handles them.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError("You must implement application_send()")
|
||||||
|
|
||||||
|
### Application instance management
|
||||||
|
|
||||||
|
def get_or_create_application_instance(self, scope_id, scope):
|
||||||
|
"""
|
||||||
|
Creates an application instance and returns its queue.
|
||||||
|
"""
|
||||||
|
if scope_id in self.application_instances:
|
||||||
|
self.application_instances[scope_id]["last_used"] = time.time()
|
||||||
|
return self.application_instances[scope_id]["input_queue"]
|
||||||
|
# See if we need to delete an old one
|
||||||
|
while len(self.application_instances) > self.max_applications:
|
||||||
|
self.delete_oldest_application_instance()
|
||||||
|
# Make an instance of the application
|
||||||
|
input_queue = asyncio.Queue()
|
||||||
|
application_instance = guarantee_single_callable(self.application)
|
||||||
|
# Run it, and stash the future for later checking
|
||||||
|
future = asyncio.ensure_future(
|
||||||
|
application_instance(
|
||||||
|
scope=scope,
|
||||||
|
receive=input_queue.get,
|
||||||
|
send=lambda message: self.application_send(scope, message),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
self.application_instances[scope_id] = {
|
||||||
|
"input_queue": input_queue,
|
||||||
|
"future": future,
|
||||||
|
"scope": scope,
|
||||||
|
"last_used": time.time(),
|
||||||
|
}
|
||||||
|
return input_queue
|
||||||
|
|
||||||
|
def delete_oldest_application_instance(self):
|
||||||
|
"""
|
||||||
|
Finds and deletes the oldest application instance
|
||||||
|
"""
|
||||||
|
oldest_time = min(
|
||||||
|
details["last_used"] for details in self.application_instances.values()
|
||||||
|
)
|
||||||
|
for scope_id, details in self.application_instances.items():
|
||||||
|
if details["last_used"] == oldest_time:
|
||||||
|
self.delete_application_instance(scope_id)
|
||||||
|
# Return to make sure we only delete one in case two have
|
||||||
|
# the same oldest time
|
||||||
|
return
|
||||||
|
|
||||||
|
def delete_application_instance(self, scope_id):
|
||||||
|
"""
|
||||||
|
Removes an application instance (makes sure its task is stopped,
|
||||||
|
then removes it from the current set)
|
||||||
|
"""
|
||||||
|
details = self.application_instances[scope_id]
|
||||||
|
del self.application_instances[scope_id]
|
||||||
|
if not details["future"].done():
|
||||||
|
details["future"].cancel()
|
||||||
|
|
||||||
|
async def application_checker(self):
|
||||||
|
"""
|
||||||
|
Goes through the set of current application instance Futures and cleans up
|
||||||
|
any that are done/prints exceptions for any that errored.
|
||||||
|
"""
|
||||||
|
while True:
|
||||||
|
await asyncio.sleep(self.application_checker_interval)
|
||||||
|
for scope_id, details in list(self.application_instances.items()):
|
||||||
|
if details["future"].done():
|
||||||
|
exception = details["future"].exception()
|
||||||
|
if exception:
|
||||||
|
await self.application_exception(exception, details)
|
||||||
|
try:
|
||||||
|
del self.application_instances[scope_id]
|
||||||
|
except KeyError:
|
||||||
|
# Exception handling might have already got here before us. That's fine.
|
||||||
|
pass
|
||||||
|
|
||||||
|
async def application_exception(self, exception, application_details):
|
||||||
|
"""
|
||||||
|
Called whenever an application coroutine has an exception.
|
||||||
|
"""
|
||||||
|
logging.error(
|
||||||
|
"Exception inside application: %s\n%s%s",
|
||||||
|
exception,
|
||||||
|
"".join(traceback.format_tb(exception.__traceback__)),
|
||||||
|
f" {exception}",
|
||||||
|
)
|
645
srcs/.venv/lib/python3.11/site-packages/asgiref/sync.py
Normal file
@ -0,0 +1,645 @@
|
|||||||
|
import asyncio
|
||||||
|
import asyncio.coroutines
|
||||||
|
import contextvars
|
||||||
|
import functools
|
||||||
|
import inspect
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import threading
|
||||||
|
import warnings
|
||||||
|
import weakref
|
||||||
|
from concurrent.futures import Future, ThreadPoolExecutor
|
||||||
|
from typing import (
|
||||||
|
TYPE_CHECKING,
|
||||||
|
Any,
|
||||||
|
Awaitable,
|
||||||
|
Callable,
|
||||||
|
Coroutine,
|
||||||
|
Dict,
|
||||||
|
Generic,
|
||||||
|
List,
|
||||||
|
Optional,
|
||||||
|
TypeVar,
|
||||||
|
Union,
|
||||||
|
overload,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .current_thread_executor import CurrentThreadExecutor
|
||||||
|
from .local import Local
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 10):
|
||||||
|
from typing import ParamSpec
|
||||||
|
else:
|
||||||
|
from typing_extensions import ParamSpec
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
# This is not available to import at runtime
|
||||||
|
from _typeshed import OptExcInfo
|
||||||
|
|
||||||
|
_F = TypeVar("_F", bound=Callable[..., Any])
|
||||||
|
_P = ParamSpec("_P")
|
||||||
|
_R = TypeVar("_R")
|
||||||
|
|
||||||
|
|
||||||
|
def _restore_context(context: contextvars.Context) -> None:
|
||||||
|
# Check for changes in contextvars, and set them to the current
|
||||||
|
# context for downstream consumers
|
||||||
|
for cvar in context:
|
||||||
|
cvalue = context.get(cvar)
|
||||||
|
try:
|
||||||
|
if cvar.get() != cvalue:
|
||||||
|
cvar.set(cvalue)
|
||||||
|
except LookupError:
|
||||||
|
cvar.set(cvalue)
|
||||||
|
|
||||||
|
|
||||||
|
# Python 3.12 deprecates asyncio.iscoroutinefunction() as an alias for
|
||||||
|
# inspect.iscoroutinefunction(), whilst also removing the _is_coroutine marker.
|
||||||
|
# The latter is replaced with the inspect.markcoroutinefunction decorator.
|
||||||
|
# Until 3.12 is the minimum supported Python version, provide a shim.
|
||||||
|
# Django 4.0 only supports 3.8+, so don't concern with the _or_partial backport.
|
||||||
|
|
||||||
|
if hasattr(inspect, "markcoroutinefunction"):
|
||||||
|
iscoroutinefunction = inspect.iscoroutinefunction
|
||||||
|
markcoroutinefunction: Callable[[_F], _F] = inspect.markcoroutinefunction
|
||||||
|
else:
|
||||||
|
iscoroutinefunction = asyncio.iscoroutinefunction # type: ignore[assignment]
|
||||||
|
|
||||||
|
def markcoroutinefunction(func: _F) -> _F:
|
||||||
|
func._is_coroutine = asyncio.coroutines._is_coroutine # type: ignore
|
||||||
|
return func
|
||||||
|
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 8):
|
||||||
|
_iscoroutinefunction_or_partial = iscoroutinefunction
|
||||||
|
else:
|
||||||
|
|
||||||
|
def _iscoroutinefunction_or_partial(func: Any) -> bool:
|
||||||
|
# Python < 3.8 does not correctly determine partially wrapped
|
||||||
|
# coroutine functions are coroutine functions, hence the need for
|
||||||
|
# this to exist. Code taken from CPython.
|
||||||
|
while inspect.ismethod(func):
|
||||||
|
func = func.__func__
|
||||||
|
while isinstance(func, functools.partial):
|
||||||
|
func = func.func
|
||||||
|
|
||||||
|
return iscoroutinefunction(func)
|
||||||
|
|
||||||
|
|
||||||
|
class ThreadSensitiveContext:
|
||||||
|
"""Async context manager to manage context for thread sensitive mode
|
||||||
|
|
||||||
|
This context manager controls which thread pool executor is used when in
|
||||||
|
thread sensitive mode. By default, a single thread pool executor is shared
|
||||||
|
within a process.
|
||||||
|
|
||||||
|
In Python 3.7+, the ThreadSensitiveContext() context manager may be used to
|
||||||
|
specify a thread pool per context.
|
||||||
|
|
||||||
|
This context manager is re-entrant, so only the outer-most call to
|
||||||
|
ThreadSensitiveContext will set the context.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
|
||||||
|
>>> import time
|
||||||
|
>>> async with ThreadSensitiveContext():
|
||||||
|
... await sync_to_async(time.sleep, 1)()
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.token = None
|
||||||
|
|
||||||
|
async def __aenter__(self):
|
||||||
|
try:
|
||||||
|
SyncToAsync.thread_sensitive_context.get()
|
||||||
|
except LookupError:
|
||||||
|
self.token = SyncToAsync.thread_sensitive_context.set(self)
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
|
async def __aexit__(self, exc, value, tb):
|
||||||
|
if not self.token:
|
||||||
|
return
|
||||||
|
|
||||||
|
executor = SyncToAsync.context_to_thread_executor.pop(self, None)
|
||||||
|
if executor:
|
||||||
|
executor.shutdown()
|
||||||
|
SyncToAsync.thread_sensitive_context.reset(self.token)
|
||||||
|
|
||||||
|
|
||||||
|
class AsyncToSync(Generic[_P, _R]):
|
||||||
|
"""
|
||||||
|
Utility class which turns an awaitable that only works on the thread with
|
||||||
|
the event loop into a synchronous callable that works in a subthread.
|
||||||
|
|
||||||
|
If the call stack contains an async loop, the code runs there.
|
||||||
|
Otherwise, the code runs in a new loop in a new thread.
|
||||||
|
|
||||||
|
Either way, this thread then pauses and waits to run any thread_sensitive
|
||||||
|
code called from further down the call stack using SyncToAsync, before
|
||||||
|
finally exiting once the async task returns.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Maps launched Tasks to the threads that launched them (for locals impl)
|
||||||
|
launch_map: "Dict[asyncio.Task[object], threading.Thread]" = {}
|
||||||
|
|
||||||
|
# Keeps track of which CurrentThreadExecutor to use. This uses an asgiref
|
||||||
|
# Local, not a threadlocal, so that tasks can work out what their parent used.
|
||||||
|
executors = Local()
|
||||||
|
|
||||||
|
# When we can't find a CurrentThreadExecutor from the context, such as
|
||||||
|
# inside create_task, we'll look it up here from the running event loop.
|
||||||
|
loop_thread_executors: "Dict[asyncio.AbstractEventLoop, CurrentThreadExecutor]" = {}
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
awaitable: Union[
|
||||||
|
Callable[_P, Coroutine[Any, Any, _R]],
|
||||||
|
Callable[_P, Awaitable[_R]],
|
||||||
|
],
|
||||||
|
force_new_loop: bool = False,
|
||||||
|
):
|
||||||
|
if not callable(awaitable) or (
|
||||||
|
not _iscoroutinefunction_or_partial(awaitable)
|
||||||
|
and not _iscoroutinefunction_or_partial(
|
||||||
|
getattr(awaitable, "__call__", awaitable)
|
||||||
|
)
|
||||||
|
):
|
||||||
|
# Python does not have very reliable detection of async functions
|
||||||
|
# (lots of false negatives) so this is just a warning.
|
||||||
|
warnings.warn(
|
||||||
|
"async_to_sync was passed a non-async-marked callable", stacklevel=2
|
||||||
|
)
|
||||||
|
self.awaitable = awaitable
|
||||||
|
try:
|
||||||
|
self.__self__ = self.awaitable.__self__ # type: ignore[union-attr]
|
||||||
|
except AttributeError:
|
||||||
|
pass
|
||||||
|
if force_new_loop:
|
||||||
|
# They have asked that we always run in a new sub-loop.
|
||||||
|
self.main_event_loop = None
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
self.main_event_loop = asyncio.get_running_loop()
|
||||||
|
except RuntimeError:
|
||||||
|
# There's no event loop in this thread. Look for the threadlocal if
|
||||||
|
# we're inside SyncToAsync
|
||||||
|
main_event_loop_pid = getattr(
|
||||||
|
SyncToAsync.threadlocal, "main_event_loop_pid", None
|
||||||
|
)
|
||||||
|
# We make sure the parent loop is from the same process - if
|
||||||
|
# they've forked, this is not going to be valid any more (#194)
|
||||||
|
if main_event_loop_pid and main_event_loop_pid == os.getpid():
|
||||||
|
self.main_event_loop = getattr(
|
||||||
|
SyncToAsync.threadlocal, "main_event_loop", None
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self.main_event_loop = None
|
||||||
|
|
||||||
|
def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R:
|
||||||
|
__traceback_hide__ = True # noqa: F841
|
||||||
|
|
||||||
|
# You can't call AsyncToSync from a thread with a running event loop
|
||||||
|
try:
|
||||||
|
event_loop = asyncio.get_running_loop()
|
||||||
|
except RuntimeError:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
if event_loop.is_running():
|
||||||
|
raise RuntimeError(
|
||||||
|
"You cannot use AsyncToSync in the same thread as an async event loop - "
|
||||||
|
"just await the async function directly."
|
||||||
|
)
|
||||||
|
|
||||||
|
# Wrapping context in list so it can be reassigned from within
|
||||||
|
# `main_wrap`.
|
||||||
|
context = [contextvars.copy_context()]
|
||||||
|
|
||||||
|
# Make a future for the return information
|
||||||
|
call_result: "Future[_R]" = Future()
|
||||||
|
# Get the source thread
|
||||||
|
source_thread = threading.current_thread()
|
||||||
|
# Make a CurrentThreadExecutor we'll use to idle in this thread - we
|
||||||
|
# need one for every sync frame, even if there's one above us in the
|
||||||
|
# same thread.
|
||||||
|
if hasattr(self.executors, "current"):
|
||||||
|
old_current_executor = self.executors.current
|
||||||
|
else:
|
||||||
|
old_current_executor = None
|
||||||
|
current_executor = CurrentThreadExecutor()
|
||||||
|
self.executors.current = current_executor
|
||||||
|
loop = None
|
||||||
|
# Use call_soon_threadsafe to schedule a synchronous callback on the
|
||||||
|
# main event loop's thread if it's there, otherwise make a new loop
|
||||||
|
# in this thread.
|
||||||
|
try:
|
||||||
|
awaitable = self.main_wrap(
|
||||||
|
call_result,
|
||||||
|
source_thread,
|
||||||
|
sys.exc_info(),
|
||||||
|
context,
|
||||||
|
*args,
|
||||||
|
**kwargs,
|
||||||
|
)
|
||||||
|
|
||||||
|
if not (self.main_event_loop and self.main_event_loop.is_running()):
|
||||||
|
# Make our own event loop - in a new thread - and run inside that.
|
||||||
|
loop = asyncio.new_event_loop()
|
||||||
|
self.loop_thread_executors[loop] = current_executor
|
||||||
|
loop_executor = ThreadPoolExecutor(max_workers=1)
|
||||||
|
loop_future = loop_executor.submit(
|
||||||
|
self._run_event_loop, loop, awaitable
|
||||||
|
)
|
||||||
|
if current_executor:
|
||||||
|
# Run the CurrentThreadExecutor until the future is done
|
||||||
|
current_executor.run_until_future(loop_future)
|
||||||
|
# Wait for future and/or allow for exception propagation
|
||||||
|
loop_future.result()
|
||||||
|
else:
|
||||||
|
# Call it inside the existing loop
|
||||||
|
self.main_event_loop.call_soon_threadsafe(
|
||||||
|
self.main_event_loop.create_task, awaitable
|
||||||
|
)
|
||||||
|
if current_executor:
|
||||||
|
# Run the CurrentThreadExecutor until the future is done
|
||||||
|
current_executor.run_until_future(call_result)
|
||||||
|
finally:
|
||||||
|
# Clean up any executor we were running
|
||||||
|
if loop is not None:
|
||||||
|
del self.loop_thread_executors[loop]
|
||||||
|
if hasattr(self.executors, "current"):
|
||||||
|
del self.executors.current
|
||||||
|
if old_current_executor:
|
||||||
|
self.executors.current = old_current_executor
|
||||||
|
_restore_context(context[0])
|
||||||
|
|
||||||
|
# Wait for results from the future.
|
||||||
|
return call_result.result()
|
||||||
|
|
||||||
|
def _run_event_loop(self, loop, coro):
|
||||||
|
"""
|
||||||
|
Runs the given event loop (designed to be called in a thread).
|
||||||
|
"""
|
||||||
|
asyncio.set_event_loop(loop)
|
||||||
|
try:
|
||||||
|
loop.run_until_complete(coro)
|
||||||
|
finally:
|
||||||
|
try:
|
||||||
|
# mimic asyncio.run() behavior
|
||||||
|
# cancel unexhausted async generators
|
||||||
|
tasks = asyncio.all_tasks(loop)
|
||||||
|
for task in tasks:
|
||||||
|
task.cancel()
|
||||||
|
|
||||||
|
async def gather():
|
||||||
|
await asyncio.gather(*tasks, return_exceptions=True)
|
||||||
|
|
||||||
|
loop.run_until_complete(gather())
|
||||||
|
for task in tasks:
|
||||||
|
if task.cancelled():
|
||||||
|
continue
|
||||||
|
if task.exception() is not None:
|
||||||
|
loop.call_exception_handler(
|
||||||
|
{
|
||||||
|
"message": "unhandled exception during loop shutdown",
|
||||||
|
"exception": task.exception(),
|
||||||
|
"task": task,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
if hasattr(loop, "shutdown_asyncgens"):
|
||||||
|
loop.run_until_complete(loop.shutdown_asyncgens())
|
||||||
|
finally:
|
||||||
|
loop.close()
|
||||||
|
asyncio.set_event_loop(self.main_event_loop)
|
||||||
|
|
||||||
|
def __get__(self, parent: Any, objtype: Any) -> Callable[_P, _R]:
|
||||||
|
"""
|
||||||
|
Include self for methods
|
||||||
|
"""
|
||||||
|
func = functools.partial(self.__call__, parent)
|
||||||
|
return functools.update_wrapper(func, self.awaitable)
|
||||||
|
|
||||||
|
async def main_wrap(
|
||||||
|
self,
|
||||||
|
call_result: "Future[_R]",
|
||||||
|
source_thread: threading.Thread,
|
||||||
|
exc_info: "OptExcInfo",
|
||||||
|
context: List[contextvars.Context],
|
||||||
|
*args: _P.args,
|
||||||
|
**kwargs: _P.kwargs,
|
||||||
|
) -> None:
|
||||||
|
"""
|
||||||
|
Wraps the awaitable with something that puts the result into the
|
||||||
|
result/exception future.
|
||||||
|
"""
|
||||||
|
|
||||||
|
__traceback_hide__ = True # noqa: F841
|
||||||
|
|
||||||
|
if context is not None:
|
||||||
|
_restore_context(context[0])
|
||||||
|
|
||||||
|
current_task = SyncToAsync.get_current_task()
|
||||||
|
assert current_task is not None
|
||||||
|
self.launch_map[current_task] = source_thread
|
||||||
|
try:
|
||||||
|
# If we have an exception, run the function inside the except block
|
||||||
|
# after raising it so exc_info is correctly populated.
|
||||||
|
if exc_info[1]:
|
||||||
|
try:
|
||||||
|
raise exc_info[1]
|
||||||
|
except BaseException:
|
||||||
|
result = await self.awaitable(*args, **kwargs)
|
||||||
|
else:
|
||||||
|
result = await self.awaitable(*args, **kwargs)
|
||||||
|
except BaseException as e:
|
||||||
|
call_result.set_exception(e)
|
||||||
|
else:
|
||||||
|
call_result.set_result(result)
|
||||||
|
finally:
|
||||||
|
del self.launch_map[current_task]
|
||||||
|
|
||||||
|
context[0] = contextvars.copy_context()
|
||||||
|
|
||||||
|
|
||||||
|
class SyncToAsync(Generic[_P, _R]):
|
||||||
|
"""
|
||||||
|
Utility class which turns a synchronous callable into an awaitable that
|
||||||
|
runs in a threadpool. It also sets a threadlocal inside the thread so
|
||||||
|
calls to AsyncToSync can escape it.
|
||||||
|
|
||||||
|
If thread_sensitive is passed, the code will run in the same thread as any
|
||||||
|
outer code. This is needed for underlying Python code that is not
|
||||||
|
threadsafe (for example, code which handles SQLite database connections).
|
||||||
|
|
||||||
|
If the outermost program is async (i.e. SyncToAsync is outermost), then
|
||||||
|
this will be a dedicated single sub-thread that all sync code runs in,
|
||||||
|
one after the other. If the outermost program is sync (i.e. AsyncToSync is
|
||||||
|
outermost), this will just be the main thread. This is achieved by idling
|
||||||
|
with a CurrentThreadExecutor while AsyncToSync is blocking its sync parent,
|
||||||
|
rather than just blocking.
|
||||||
|
|
||||||
|
If executor is passed in, that will be used instead of the loop's default executor.
|
||||||
|
In order to pass in an executor, thread_sensitive must be set to False, otherwise
|
||||||
|
a TypeError will be raised.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Maps launched threads to the coroutines that spawned them
|
||||||
|
launch_map: "Dict[threading.Thread, asyncio.Task[object]]" = {}
|
||||||
|
|
||||||
|
# Storage for main event loop references
|
||||||
|
threadlocal = threading.local()
|
||||||
|
|
||||||
|
# Single-thread executor for thread-sensitive code
|
||||||
|
single_thread_executor = ThreadPoolExecutor(max_workers=1)
|
||||||
|
|
||||||
|
# Maintain a contextvar for the current execution context. Optionally used
|
||||||
|
# for thread sensitive mode.
|
||||||
|
thread_sensitive_context: "contextvars.ContextVar[ThreadSensitiveContext]" = (
|
||||||
|
contextvars.ContextVar("thread_sensitive_context")
|
||||||
|
)
|
||||||
|
|
||||||
|
# Contextvar that is used to detect if the single thread executor
|
||||||
|
# would be awaited on while already being used in the same context
|
||||||
|
deadlock_context: "contextvars.ContextVar[bool]" = contextvars.ContextVar(
|
||||||
|
"deadlock_context"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Maintaining a weak reference to the context ensures that thread pools are
|
||||||
|
# erased once the context goes out of scope. This terminates the thread pool.
|
||||||
|
context_to_thread_executor: "weakref.WeakKeyDictionary[ThreadSensitiveContext, ThreadPoolExecutor]" = (
|
||||||
|
weakref.WeakKeyDictionary()
|
||||||
|
)
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
func: Callable[_P, _R],
|
||||||
|
thread_sensitive: bool = True,
|
||||||
|
executor: Optional["ThreadPoolExecutor"] = None,
|
||||||
|
) -> None:
|
||||||
|
if (
|
||||||
|
not callable(func)
|
||||||
|
or _iscoroutinefunction_or_partial(func)
|
||||||
|
or _iscoroutinefunction_or_partial(getattr(func, "__call__", func))
|
||||||
|
):
|
||||||
|
raise TypeError("sync_to_async can only be applied to sync functions.")
|
||||||
|
self.func = func
|
||||||
|
functools.update_wrapper(self, func)
|
||||||
|
self._thread_sensitive = thread_sensitive
|
||||||
|
markcoroutinefunction(self)
|
||||||
|
if thread_sensitive and executor is not None:
|
||||||
|
raise TypeError("executor must not be set when thread_sensitive is True")
|
||||||
|
self._executor = executor
|
||||||
|
try:
|
||||||
|
self.__self__ = func.__self__ # type: ignore
|
||||||
|
except AttributeError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
async def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R:
|
||||||
|
__traceback_hide__ = True # noqa: F841
|
||||||
|
loop = asyncio.get_running_loop()
|
||||||
|
|
||||||
|
# Work out what thread to run the code in
|
||||||
|
if self._thread_sensitive:
|
||||||
|
if hasattr(AsyncToSync.executors, "current"):
|
||||||
|
# If we have a parent sync thread above somewhere, use that
|
||||||
|
executor = AsyncToSync.executors.current
|
||||||
|
elif self.thread_sensitive_context.get(None):
|
||||||
|
# If we have a way of retrieving the current context, attempt
|
||||||
|
# to use a per-context thread pool executor
|
||||||
|
thread_sensitive_context = self.thread_sensitive_context.get()
|
||||||
|
|
||||||
|
if thread_sensitive_context in self.context_to_thread_executor:
|
||||||
|
# Re-use thread executor in current context
|
||||||
|
executor = self.context_to_thread_executor[thread_sensitive_context]
|
||||||
|
else:
|
||||||
|
# Create new thread executor in current context
|
||||||
|
executor = ThreadPoolExecutor(max_workers=1)
|
||||||
|
self.context_to_thread_executor[thread_sensitive_context] = executor
|
||||||
|
elif loop in AsyncToSync.loop_thread_executors:
|
||||||
|
# Re-use thread executor for running loop
|
||||||
|
executor = AsyncToSync.loop_thread_executors[loop]
|
||||||
|
elif self.deadlock_context.get(False):
|
||||||
|
raise RuntimeError(
|
||||||
|
"Single thread executor already being used, would deadlock"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
# Otherwise, we run it in a fixed single thread
|
||||||
|
executor = self.single_thread_executor
|
||||||
|
self.deadlock_context.set(True)
|
||||||
|
else:
|
||||||
|
# Use the passed in executor, or the loop's default if it is None
|
||||||
|
executor = self._executor
|
||||||
|
|
||||||
|
context = contextvars.copy_context()
|
||||||
|
child = functools.partial(self.func, *args, **kwargs)
|
||||||
|
func = context.run
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Run the code in the right thread
|
||||||
|
ret: _R = await loop.run_in_executor(
|
||||||
|
executor,
|
||||||
|
functools.partial(
|
||||||
|
self.thread_handler,
|
||||||
|
loop,
|
||||||
|
self.get_current_task(),
|
||||||
|
sys.exc_info(),
|
||||||
|
func,
|
||||||
|
child,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
finally:
|
||||||
|
_restore_context(context)
|
||||||
|
self.deadlock_context.set(False)
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def __get__(
|
||||||
|
self, parent: Any, objtype: Any
|
||||||
|
) -> Callable[_P, Coroutine[Any, Any, _R]]:
|
||||||
|
"""
|
||||||
|
Include self for methods
|
||||||
|
"""
|
||||||
|
func = functools.partial(self.__call__, parent)
|
||||||
|
return functools.update_wrapper(func, self.func)
|
||||||
|
|
||||||
|
def thread_handler(self, loop, source_task, exc_info, func, *args, **kwargs):
|
||||||
|
"""
|
||||||
|
Wraps the sync application with exception handling.
|
||||||
|
"""
|
||||||
|
|
||||||
|
__traceback_hide__ = True # noqa: F841
|
||||||
|
|
||||||
|
# Set the threadlocal for AsyncToSync
|
||||||
|
self.threadlocal.main_event_loop = loop
|
||||||
|
self.threadlocal.main_event_loop_pid = os.getpid()
|
||||||
|
# Set the task mapping (used for the locals module)
|
||||||
|
current_thread = threading.current_thread()
|
||||||
|
if AsyncToSync.launch_map.get(source_task) == current_thread:
|
||||||
|
# Our parent task was launched from this same thread, so don't make
|
||||||
|
# a launch map entry - let it shortcut over us! (and stop infinite loops)
|
||||||
|
parent_set = False
|
||||||
|
else:
|
||||||
|
self.launch_map[current_thread] = source_task
|
||||||
|
parent_set = True
|
||||||
|
source_task = (
|
||||||
|
None # allow the task to be garbage-collected in case of exceptions
|
||||||
|
)
|
||||||
|
# Run the function
|
||||||
|
try:
|
||||||
|
# If we have an exception, run the function inside the except block
|
||||||
|
# after raising it so exc_info is correctly populated.
|
||||||
|
if exc_info[1]:
|
||||||
|
try:
|
||||||
|
raise exc_info[1]
|
||||||
|
except BaseException:
|
||||||
|
return func(*args, **kwargs)
|
||||||
|
else:
|
||||||
|
return func(*args, **kwargs)
|
||||||
|
finally:
|
||||||
|
# Only delete the launch_map parent if we set it, otherwise it is
|
||||||
|
# from someone else.
|
||||||
|
if parent_set:
|
||||||
|
del self.launch_map[current_thread]
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_current_task() -> Optional["asyncio.Task[Any]"]:
|
||||||
|
"""
|
||||||
|
Implementation of asyncio.current_task()
|
||||||
|
that returns None if there is no task.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return asyncio.current_task()
|
||||||
|
except RuntimeError:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def async_to_sync(
|
||||||
|
*,
|
||||||
|
force_new_loop: bool = False,
|
||||||
|
) -> Callable[
|
||||||
|
[Union[Callable[_P, Coroutine[Any, Any, _R]], Callable[_P, Awaitable[_R]]]],
|
||||||
|
Callable[_P, _R],
|
||||||
|
]:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def async_to_sync(
|
||||||
|
awaitable: Union[
|
||||||
|
Callable[_P, Coroutine[Any, Any, _R]],
|
||||||
|
Callable[_P, Awaitable[_R]],
|
||||||
|
],
|
||||||
|
*,
|
||||||
|
force_new_loop: bool = False,
|
||||||
|
) -> Callable[_P, _R]:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
def async_to_sync(
|
||||||
|
awaitable: Optional[
|
||||||
|
Union[
|
||||||
|
Callable[_P, Coroutine[Any, Any, _R]],
|
||||||
|
Callable[_P, Awaitable[_R]],
|
||||||
|
]
|
||||||
|
] = None,
|
||||||
|
*,
|
||||||
|
force_new_loop: bool = False,
|
||||||
|
) -> Union[
|
||||||
|
Callable[
|
||||||
|
[Union[Callable[_P, Coroutine[Any, Any, _R]], Callable[_P, Awaitable[_R]]]],
|
||||||
|
Callable[_P, _R],
|
||||||
|
],
|
||||||
|
Callable[_P, _R],
|
||||||
|
]:
|
||||||
|
if awaitable is None:
|
||||||
|
return lambda f: AsyncToSync(
|
||||||
|
f,
|
||||||
|
force_new_loop=force_new_loop,
|
||||||
|
)
|
||||||
|
return AsyncToSync(
|
||||||
|
awaitable,
|
||||||
|
force_new_loop=force_new_loop,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def sync_to_async(
|
||||||
|
*,
|
||||||
|
thread_sensitive: bool = True,
|
||||||
|
executor: Optional["ThreadPoolExecutor"] = None,
|
||||||
|
) -> Callable[[Callable[_P, _R]], Callable[_P, Coroutine[Any, Any, _R]]]:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def sync_to_async(
|
||||||
|
func: Callable[_P, _R],
|
||||||
|
*,
|
||||||
|
thread_sensitive: bool = True,
|
||||||
|
executor: Optional["ThreadPoolExecutor"] = None,
|
||||||
|
) -> Callable[_P, Coroutine[Any, Any, _R]]:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
def sync_to_async(
|
||||||
|
func: Optional[Callable[_P, _R]] = None,
|
||||||
|
*,
|
||||||
|
thread_sensitive: bool = True,
|
||||||
|
executor: Optional["ThreadPoolExecutor"] = None,
|
||||||
|
) -> Union[
|
||||||
|
Callable[[Callable[_P, _R]], Callable[_P, Coroutine[Any, Any, _R]]],
|
||||||
|
Callable[_P, Coroutine[Any, Any, _R]],
|
||||||
|
]:
|
||||||
|
if func is None:
|
||||||
|
return lambda f: SyncToAsync(
|
||||||
|
f,
|
||||||
|
thread_sensitive=thread_sensitive,
|
||||||
|
executor=executor,
|
||||||
|
)
|
||||||
|
return SyncToAsync(
|
||||||
|
func,
|
||||||
|
thread_sensitive=thread_sensitive,
|
||||||
|
executor=executor,
|
||||||
|
)
|
97
srcs/.venv/lib/python3.11/site-packages/asgiref/testing.py
Normal file
@ -0,0 +1,97 @@
import asyncio
import time

from .compatibility import guarantee_single_callable
from .timeout import timeout as async_timeout


class ApplicationCommunicator:
    """
    Runs an ASGI application in a test mode, allowing sending of
    messages to it and retrieval of messages it sends.
    """

    def __init__(self, application, scope):
        self.application = guarantee_single_callable(application)
        self.scope = scope
        self.input_queue = asyncio.Queue()
        self.output_queue = asyncio.Queue()
        self.future = asyncio.ensure_future(
            self.application(scope, self.input_queue.get, self.output_queue.put)
        )

    async def wait(self, timeout=1):
        """
        Waits for the application to stop itself and returns any exceptions.
        """
        try:
            async with async_timeout(timeout):
                try:
                    await self.future
                    self.future.result()
                except asyncio.CancelledError:
                    pass
        finally:
            if not self.future.done():
                self.future.cancel()
                try:
                    await self.future
                except asyncio.CancelledError:
                    pass

    def stop(self, exceptions=True):
        if not self.future.done():
            self.future.cancel()
        elif exceptions:
            # Give a chance to raise any exceptions
            self.future.result()

    def __del__(self):
        # Clean up on deletion
        try:
            self.stop(exceptions=False)
        except RuntimeError:
            # Event loop already stopped
            pass

    async def send_input(self, message):
        """
        Sends a single message to the application
        """
        # Give it the message
        await self.input_queue.put(message)

    async def receive_output(self, timeout=1):
        """
        Receives a single message from the application, with optional timeout.
        """
        # Make sure there's not an exception to raise from the task
        if self.future.done():
            self.future.result()
        # Wait and receive the message
        try:
            async with async_timeout(timeout):
                return await self.output_queue.get()
        except asyncio.TimeoutError as e:
            # See if we have another error to raise inside
            if self.future.done():
                self.future.result()
            else:
                self.future.cancel()
                try:
                    await self.future
                except asyncio.CancelledError:
                    pass
            raise e

    async def receive_nothing(self, timeout=0.1, interval=0.01):
        """
        Checks that there is no message to receive in the given time.
        """
        # `interval` has precedence over `timeout`
        start = time.monotonic()
        while time.monotonic() - start < timeout:
            if not self.output_queue.empty():
                return False
            await asyncio.sleep(interval)
        return self.output_queue.empty()
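
Usage sketch (not part of the committed file): driving a trivial ASGI callable with the ApplicationCommunicator shown above. The echo_app coroutine is a hypothetical test application.

import asyncio
from asgiref.testing import ApplicationCommunicator


async def echo_app(scope, receive, send):
    # Illustrative app: reads one message and echoes its body back.
    message = await receive()
    await send({"type": "echo", "body": message["body"]})


async def main():
    communicator = ApplicationCommunicator(echo_app, {"type": "http"})
    await communicator.send_input({"type": "http.request", "body": b"hi"})
    reply = await communicator.receive_output(timeout=1)
    assert reply["body"] == b"hi"
    await communicator.wait(timeout=1)


asyncio.run(main())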
118
srcs/.venv/lib/python3.11/site-packages/asgiref/timeout.py
Normal file
@@ -0,0 +1,118 @@
# This code is originally sourced from the aio-libs project "async_timeout",
# under the Apache 2.0 license. You may see the original project at
# https://github.com/aio-libs/async-timeout

# It is vendored here to reduce chain-dependencies on this library, and
# modified slightly to remove some features we don't use.


import asyncio
import warnings
from types import TracebackType
from typing import Any  # noqa
from typing import Optional, Type


class timeout:
    """timeout context manager.

    Useful in cases when you want to apply timeout logic around block
    of code or in cases when asyncio.wait_for is not suitable. For example:

    >>> with timeout(0.001):
    ...     async with aiohttp.get('https://github.com') as r:
    ...         await r.text()


    timeout - value in seconds or None to disable timeout logic
    loop - asyncio compatible event loop
    """

    def __init__(
        self,
        timeout: Optional[float],
        *,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        self._timeout = timeout
        if loop is None:
            loop = asyncio.get_running_loop()
        else:
            warnings.warn(
                """The loop argument to timeout() is deprecated.""", DeprecationWarning
            )
        self._loop = loop
        self._task = None  # type: Optional[asyncio.Task[Any]]
        self._cancelled = False
        self._cancel_handler = None  # type: Optional[asyncio.Handle]
        self._cancel_at = None  # type: Optional[float]

    def __enter__(self) -> "timeout":
        return self._do_enter()

    def __exit__(
        self,
        exc_type: Type[BaseException],
        exc_val: BaseException,
        exc_tb: TracebackType,
    ) -> Optional[bool]:
        self._do_exit(exc_type)
        return None

    async def __aenter__(self) -> "timeout":
        return self._do_enter()

    async def __aexit__(
        self,
        exc_type: Type[BaseException],
        exc_val: BaseException,
        exc_tb: TracebackType,
    ) -> None:
        self._do_exit(exc_type)

    @property
    def expired(self) -> bool:
        return self._cancelled

    @property
    def remaining(self) -> Optional[float]:
        if self._cancel_at is not None:
            return max(self._cancel_at - self._loop.time(), 0.0)
        else:
            return None

    def _do_enter(self) -> "timeout":
        # Support Tornado 5- without timeout
        # Details: https://github.com/python/asyncio/issues/392
        if self._timeout is None:
            return self

        self._task = asyncio.current_task(self._loop)
        if self._task is None:
            raise RuntimeError(
                "Timeout context manager should be used " "inside a task"
            )

        if self._timeout <= 0:
            self._loop.call_soon(self._cancel_task)
            return self

        self._cancel_at = self._loop.time() + self._timeout
        self._cancel_handler = self._loop.call_at(self._cancel_at, self._cancel_task)
        return self

    def _do_exit(self, exc_type: Type[BaseException]) -> None:
        if exc_type is asyncio.CancelledError and self._cancelled:
            self._cancel_handler = None
            self._task = None
            raise asyncio.TimeoutError
        if self._timeout is not None and self._cancel_handler is not None:
            self._cancel_handler.cancel()
            self._cancel_handler = None
        self._task = None
        return None

    def _cancel_task(self) -> None:
        if self._task is not None:
            self._task.cancel()
            self._cancelled = True
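
Usage sketch (not part of the committed file): guarding a slow coroutine with the vendored timeout context manager above.

import asyncio
from asgiref.timeout import timeout


async def main():
    try:
        async with timeout(0.1):
            await asyncio.sleep(1)  # deliberately longer than the allowed 0.1s
    except asyncio.TimeoutError:
        print("timed out as expected")


asyncio.run(main())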
274
srcs/.venv/lib/python3.11/site-packages/asgiref/typing.py
Normal file
@@ -0,0 +1,274 @@
|
|||||||
|
import sys
|
||||||
|
from typing import (
|
||||||
|
Any,
|
||||||
|
Awaitable,
|
||||||
|
Callable,
|
||||||
|
Dict,
|
||||||
|
Iterable,
|
||||||
|
Optional,
|
||||||
|
Tuple,
|
||||||
|
Type,
|
||||||
|
Union,
|
||||||
|
)
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 8):
|
||||||
|
from typing import Literal, Protocol, TypedDict
|
||||||
|
else:
|
||||||
|
from typing_extensions import Literal, Protocol, TypedDict
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 11):
|
||||||
|
from typing import NotRequired
|
||||||
|
else:
|
||||||
|
from typing_extensions import NotRequired
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"ASGIVersions",
|
||||||
|
"HTTPScope",
|
||||||
|
"WebSocketScope",
|
||||||
|
"LifespanScope",
|
||||||
|
"WWWScope",
|
||||||
|
"Scope",
|
||||||
|
"HTTPRequestEvent",
|
||||||
|
"HTTPResponseStartEvent",
|
||||||
|
"HTTPResponseBodyEvent",
|
||||||
|
"HTTPResponseTrailersEvent",
|
||||||
|
"HTTPServerPushEvent",
|
||||||
|
"HTTPDisconnectEvent",
|
||||||
|
"WebSocketConnectEvent",
|
||||||
|
"WebSocketAcceptEvent",
|
||||||
|
"WebSocketReceiveEvent",
|
||||||
|
"WebSocketSendEvent",
|
||||||
|
"WebSocketResponseStartEvent",
|
||||||
|
"WebSocketResponseBodyEvent",
|
||||||
|
"WebSocketDisconnectEvent",
|
||||||
|
"WebSocketCloseEvent",
|
||||||
|
"LifespanStartupEvent",
|
||||||
|
"LifespanShutdownEvent",
|
||||||
|
"LifespanStartupCompleteEvent",
|
||||||
|
"LifespanStartupFailedEvent",
|
||||||
|
"LifespanShutdownCompleteEvent",
|
||||||
|
"LifespanShutdownFailedEvent",
|
||||||
|
"ASGIReceiveEvent",
|
||||||
|
"ASGISendEvent",
|
||||||
|
"ASGIReceiveCallable",
|
||||||
|
"ASGISendCallable",
|
||||||
|
"ASGI2Protocol",
|
||||||
|
"ASGI2Application",
|
||||||
|
"ASGI3Application",
|
||||||
|
"ASGIApplication",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ASGIVersions(TypedDict):
|
||||||
|
spec_version: str
|
||||||
|
version: Union[Literal["2.0"], Literal["3.0"]]
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPScope(TypedDict):
|
||||||
|
type: Literal["http"]
|
||||||
|
asgi: ASGIVersions
|
||||||
|
http_version: str
|
||||||
|
method: str
|
||||||
|
scheme: str
|
||||||
|
path: str
|
||||||
|
raw_path: bytes
|
||||||
|
query_string: bytes
|
||||||
|
root_path: str
|
||||||
|
headers: Iterable[Tuple[bytes, bytes]]
|
||||||
|
client: Optional[Tuple[str, int]]
|
||||||
|
server: Optional[Tuple[str, Optional[int]]]
|
||||||
|
state: NotRequired[Dict[str, Any]]
|
||||||
|
extensions: Optional[Dict[str, Dict[object, object]]]
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketScope(TypedDict):
|
||||||
|
type: Literal["websocket"]
|
||||||
|
asgi: ASGIVersions
|
||||||
|
http_version: str
|
||||||
|
scheme: str
|
||||||
|
path: str
|
||||||
|
raw_path: bytes
|
||||||
|
query_string: bytes
|
||||||
|
root_path: str
|
||||||
|
headers: Iterable[Tuple[bytes, bytes]]
|
||||||
|
client: Optional[Tuple[str, int]]
|
||||||
|
server: Optional[Tuple[str, Optional[int]]]
|
||||||
|
subprotocols: Iterable[str]
|
||||||
|
state: NotRequired[Dict[str, Any]]
|
||||||
|
extensions: Optional[Dict[str, Dict[object, object]]]
|
||||||
|
|
||||||
|
|
||||||
|
class LifespanScope(TypedDict):
|
||||||
|
type: Literal["lifespan"]
|
||||||
|
asgi: ASGIVersions
|
||||||
|
state: NotRequired[Dict[str, Any]]
|
||||||
|
|
||||||
|
|
||||||
|
WWWScope = Union[HTTPScope, WebSocketScope]
|
||||||
|
Scope = Union[HTTPScope, WebSocketScope, LifespanScope]
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPRequestEvent(TypedDict):
|
||||||
|
type: Literal["http.request"]
|
||||||
|
body: bytes
|
||||||
|
more_body: bool
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPResponseDebugEvent(TypedDict):
|
||||||
|
type: Literal["http.response.debug"]
|
||||||
|
info: Dict[str, object]
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPResponseStartEvent(TypedDict):
|
||||||
|
type: Literal["http.response.start"]
|
||||||
|
status: int
|
||||||
|
headers: Iterable[Tuple[bytes, bytes]]
|
||||||
|
trailers: bool
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPResponseBodyEvent(TypedDict):
|
||||||
|
type: Literal["http.response.body"]
|
||||||
|
body: bytes
|
||||||
|
more_body: bool
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPResponseTrailersEvent(TypedDict):
|
||||||
|
type: Literal["http.response.trailers"]
|
||||||
|
headers: Iterable[Tuple[bytes, bytes]]
|
||||||
|
more_trailers: bool
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPServerPushEvent(TypedDict):
|
||||||
|
type: Literal["http.response.push"]
|
||||||
|
path: str
|
||||||
|
headers: Iterable[Tuple[bytes, bytes]]
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPDisconnectEvent(TypedDict):
|
||||||
|
type: Literal["http.disconnect"]
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketConnectEvent(TypedDict):
|
||||||
|
type: Literal["websocket.connect"]
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketAcceptEvent(TypedDict):
|
||||||
|
type: Literal["websocket.accept"]
|
||||||
|
subprotocol: Optional[str]
|
||||||
|
headers: Iterable[Tuple[bytes, bytes]]
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketReceiveEvent(TypedDict):
|
||||||
|
type: Literal["websocket.receive"]
|
||||||
|
bytes: Optional[bytes]
|
||||||
|
text: Optional[str]
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketSendEvent(TypedDict):
|
||||||
|
type: Literal["websocket.send"]
|
||||||
|
bytes: Optional[bytes]
|
||||||
|
text: Optional[str]
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketResponseStartEvent(TypedDict):
|
||||||
|
type: Literal["websocket.http.response.start"]
|
||||||
|
status: int
|
||||||
|
headers: Iterable[Tuple[bytes, bytes]]
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketResponseBodyEvent(TypedDict):
|
||||||
|
type: Literal["websocket.http.response.body"]
|
||||||
|
body: bytes
|
||||||
|
more_body: bool
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketDisconnectEvent(TypedDict):
|
||||||
|
type: Literal["websocket.disconnect"]
|
||||||
|
code: int
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketCloseEvent(TypedDict):
|
||||||
|
type: Literal["websocket.close"]
|
||||||
|
code: int
|
||||||
|
reason: Optional[str]
|
||||||
|
|
||||||
|
|
||||||
|
class LifespanStartupEvent(TypedDict):
|
||||||
|
type: Literal["lifespan.startup"]
|
||||||
|
|
||||||
|
|
||||||
|
class LifespanShutdownEvent(TypedDict):
|
||||||
|
type: Literal["lifespan.shutdown"]
|
||||||
|
|
||||||
|
|
||||||
|
class LifespanStartupCompleteEvent(TypedDict):
|
||||||
|
type: Literal["lifespan.startup.complete"]
|
||||||
|
|
||||||
|
|
||||||
|
class LifespanStartupFailedEvent(TypedDict):
|
||||||
|
type: Literal["lifespan.startup.failed"]
|
||||||
|
message: str
|
||||||
|
|
||||||
|
|
||||||
|
class LifespanShutdownCompleteEvent(TypedDict):
|
||||||
|
type: Literal["lifespan.shutdown.complete"]
|
||||||
|
|
||||||
|
|
||||||
|
class LifespanShutdownFailedEvent(TypedDict):
|
||||||
|
type: Literal["lifespan.shutdown.failed"]
|
||||||
|
message: str
|
||||||
|
|
||||||
|
|
||||||
|
ASGIReceiveEvent = Union[
|
||||||
|
HTTPRequestEvent,
|
||||||
|
HTTPDisconnectEvent,
|
||||||
|
WebSocketConnectEvent,
|
||||||
|
WebSocketReceiveEvent,
|
||||||
|
WebSocketDisconnectEvent,
|
||||||
|
LifespanStartupEvent,
|
||||||
|
LifespanShutdownEvent,
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
ASGISendEvent = Union[
|
||||||
|
HTTPResponseStartEvent,
|
||||||
|
HTTPResponseBodyEvent,
|
||||||
|
HTTPResponseTrailersEvent,
|
||||||
|
HTTPServerPushEvent,
|
||||||
|
HTTPDisconnectEvent,
|
||||||
|
WebSocketAcceptEvent,
|
||||||
|
WebSocketSendEvent,
|
||||||
|
WebSocketResponseStartEvent,
|
||||||
|
WebSocketResponseBodyEvent,
|
||||||
|
WebSocketCloseEvent,
|
||||||
|
LifespanStartupCompleteEvent,
|
||||||
|
LifespanStartupFailedEvent,
|
||||||
|
LifespanShutdownCompleteEvent,
|
||||||
|
LifespanShutdownFailedEvent,
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
ASGIReceiveCallable = Callable[[], Awaitable[ASGIReceiveEvent]]
|
||||||
|
ASGISendCallable = Callable[[ASGISendEvent], Awaitable[None]]
|
||||||
|
|
||||||
|
|
||||||
|
class ASGI2Protocol(Protocol):
|
||||||
|
def __init__(self, scope: Scope) -> None:
|
||||||
|
...
|
||||||
|
|
||||||
|
async def __call__(
|
||||||
|
self, receive: ASGIReceiveCallable, send: ASGISendCallable
|
||||||
|
) -> None:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
ASGI2Application = Type[ASGI2Protocol]
|
||||||
|
ASGI3Application = Callable[
|
||||||
|
[
|
||||||
|
Scope,
|
||||||
|
ASGIReceiveCallable,
|
||||||
|
ASGISendCallable,
|
||||||
|
],
|
||||||
|
Awaitable[None],
|
||||||
|
]
|
||||||
|
ASGIApplication = Union[ASGI2Application, ASGI3Application]
|
162
srcs/.venv/lib/python3.11/site-packages/asgiref/wsgi.py
Normal file
@@ -0,0 +1,162 @@
|
|||||||
|
from io import BytesIO
|
||||||
|
from tempfile import SpooledTemporaryFile
|
||||||
|
|
||||||
|
from asgiref.sync import AsyncToSync, sync_to_async
|
||||||
|
|
||||||
|
|
||||||
|
class WsgiToAsgi:
|
||||||
|
"""
|
||||||
|
Wraps a WSGI application to make it into an ASGI application.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, wsgi_application):
|
||||||
|
self.wsgi_application = wsgi_application
|
||||||
|
|
||||||
|
async def __call__(self, scope, receive, send):
|
||||||
|
"""
|
||||||
|
ASGI application instantiation point.
|
||||||
|
We return a new WsgiToAsgiInstance here with the WSGI app
|
||||||
|
and the scope, ready to respond when it is __call__ed.
|
||||||
|
"""
|
||||||
|
await WsgiToAsgiInstance(self.wsgi_application)(scope, receive, send)
|
||||||
|
|
||||||
|
|
||||||
|
class WsgiToAsgiInstance:
|
||||||
|
"""
|
||||||
|
Per-socket instance of a wrapped WSGI application
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, wsgi_application):
|
||||||
|
self.wsgi_application = wsgi_application
|
||||||
|
self.response_started = False
|
||||||
|
self.response_content_length = None
|
||||||
|
|
||||||
|
async def __call__(self, scope, receive, send):
|
||||||
|
if scope["type"] != "http":
|
||||||
|
raise ValueError("WSGI wrapper received a non-HTTP scope")
|
||||||
|
self.scope = scope
|
||||||
|
with SpooledTemporaryFile(max_size=65536) as body:
|
||||||
|
# Alright, wait for the http.request messages
|
||||||
|
while True:
|
||||||
|
message = await receive()
|
||||||
|
if message["type"] != "http.request":
|
||||||
|
raise ValueError("WSGI wrapper received a non-HTTP-request message")
|
||||||
|
body.write(message.get("body", b""))
|
||||||
|
if not message.get("more_body"):
|
||||||
|
break
|
||||||
|
body.seek(0)
|
||||||
|
# Wrap send so it can be called from the subthread
|
||||||
|
self.sync_send = AsyncToSync(send)
|
||||||
|
# Call the WSGI app
|
||||||
|
await self.run_wsgi_app(body)
|
||||||
|
|
||||||
|
def build_environ(self, scope, body):
|
||||||
|
"""
|
||||||
|
Builds a scope and request body into a WSGI environ object.
|
||||||
|
"""
|
||||||
|
environ = {
|
||||||
|
"REQUEST_METHOD": scope["method"],
|
||||||
|
"SCRIPT_NAME": scope.get("root_path", "").encode("utf8").decode("latin1"),
|
||||||
|
"PATH_INFO": scope["path"].encode("utf8").decode("latin1"),
|
||||||
|
"QUERY_STRING": scope["query_string"].decode("ascii"),
|
||||||
|
"SERVER_PROTOCOL": "HTTP/%s" % scope["http_version"],
|
||||||
|
"wsgi.version": (1, 0),
|
||||||
|
"wsgi.url_scheme": scope.get("scheme", "http"),
|
||||||
|
"wsgi.input": body,
|
||||||
|
"wsgi.errors": BytesIO(),
|
||||||
|
"wsgi.multithread": True,
|
||||||
|
"wsgi.multiprocess": True,
|
||||||
|
"wsgi.run_once": False,
|
||||||
|
}
|
||||||
|
# Get server name and port - required in WSGI, not in ASGI
|
||||||
|
if "server" in scope:
|
||||||
|
environ["SERVER_NAME"] = scope["server"][0]
|
||||||
|
environ["SERVER_PORT"] = str(scope["server"][1])
|
||||||
|
else:
|
||||||
|
environ["SERVER_NAME"] = "localhost"
|
||||||
|
environ["SERVER_PORT"] = "80"
|
||||||
|
|
||||||
|
if "client" in scope:
|
||||||
|
environ["REMOTE_ADDR"] = scope["client"][0]
|
||||||
|
|
||||||
|
# Go through headers and make them into environ entries
|
||||||
|
for name, value in self.scope.get("headers", []):
|
||||||
|
name = name.decode("latin1")
|
||||||
|
if name == "content-length":
|
||||||
|
corrected_name = "CONTENT_LENGTH"
|
||||||
|
elif name == "content-type":
|
||||||
|
corrected_name = "CONTENT_TYPE"
|
||||||
|
else:
|
||||||
|
corrected_name = "HTTP_%s" % name.upper().replace("-", "_")
|
||||||
|
# HTTPbis say only ASCII chars are allowed in headers, but we latin1 just in case
|
||||||
|
value = value.decode("latin1")
|
||||||
|
if corrected_name in environ:
|
||||||
|
value = environ[corrected_name] + "," + value
|
||||||
|
environ[corrected_name] = value
|
||||||
|
return environ
|
||||||
|
|
||||||
|
def start_response(self, status, response_headers, exc_info=None):
|
||||||
|
"""
|
||||||
|
WSGI start_response callable.
|
||||||
|
"""
|
||||||
|
# Don't allow re-calling once response has begun
|
||||||
|
if self.response_started:
|
||||||
|
raise exc_info[1].with_traceback(exc_info[2])
|
||||||
|
# Don't allow re-calling without exc_info
|
||||||
|
if hasattr(self, "response_start") and exc_info is None:
|
||||||
|
raise ValueError(
|
||||||
|
"You cannot call start_response a second time without exc_info"
|
||||||
|
)
|
||||||
|
# Extract status code
|
||||||
|
status_code, _ = status.split(" ", 1)
|
||||||
|
status_code = int(status_code)
|
||||||
|
# Extract headers
|
||||||
|
headers = [
|
||||||
|
(name.lower().encode("ascii"), value.encode("ascii"))
|
||||||
|
for name, value in response_headers
|
||||||
|
]
|
||||||
|
# Extract content-length
|
||||||
|
self.response_content_length = None
|
||||||
|
for name, value in response_headers:
|
||||||
|
if name.lower() == "content-length":
|
||||||
|
self.response_content_length = int(value)
|
||||||
|
# Build and send response start message.
|
||||||
|
self.response_start = {
|
||||||
|
"type": "http.response.start",
|
||||||
|
"status": status_code,
|
||||||
|
"headers": headers,
|
||||||
|
}
|
||||||
|
|
||||||
|
@sync_to_async
|
||||||
|
def run_wsgi_app(self, body):
|
||||||
|
"""
|
||||||
|
Called in a subthread to run the WSGI app. We encapsulate like
|
||||||
|
this so that the start_response callable is called in the same thread.
|
||||||
|
"""
|
||||||
|
# Translate the scope and incoming request body into a WSGI environ
|
||||||
|
environ = self.build_environ(self.scope, body)
|
||||||
|
# Run the WSGI app
|
||||||
|
bytes_sent = 0
|
||||||
|
for output in self.wsgi_application(environ, self.start_response):
|
||||||
|
# If this is the first response, include the response headers
|
||||||
|
if not self.response_started:
|
||||||
|
self.response_started = True
|
||||||
|
self.sync_send(self.response_start)
|
||||||
|
# If the application supplies a Content-Length header
|
||||||
|
if self.response_content_length is not None:
|
||||||
|
# The server should not transmit more bytes to the client than the header allows
|
||||||
|
bytes_allowed = self.response_content_length - bytes_sent
|
||||||
|
if len(output) > bytes_allowed:
|
||||||
|
output = output[:bytes_allowed]
|
||||||
|
self.sync_send(
|
||||||
|
{"type": "http.response.body", "body": output, "more_body": True}
|
||||||
|
)
|
||||||
|
bytes_sent += len(output)
|
||||||
|
# The server should stop iterating over the response when enough data has been sent
|
||||||
|
if bytes_sent == self.response_content_length:
|
||||||
|
break
|
||||||
|
# Close connection
|
||||||
|
if not self.response_started:
|
||||||
|
self.response_started = True
|
||||||
|
self.sync_send(self.response_start)
|
||||||
|
self.sync_send({"type": "http.response.body"})
|
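
Usage sketch (not part of the committed file): wrapping a plain WSGI callable with the WsgiToAsgi adapter above so an ASGI server can serve it. The wsgi_app function and the module name in the comment are illustrative.

from asgiref.wsgi import WsgiToAsgi


def wsgi_app(environ, start_response):
    # Minimal WSGI application used only for this sketch.
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"Hello from WSGI"]


asgi_app = WsgiToAsgi(wsgi_app)
# e.g. serve with an ASGI server: uvicorn module_name:asgi_app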
15
srcs/.venv/lib/python3.11/site-packages/corsheaders/apps.py
Normal file
@@ -0,0 +1,15 @@
from __future__ import annotations

from django.apps import AppConfig
from django.core.checks import register
from django.core.checks import Tags

from corsheaders.checks import check_settings


class CorsHeadersAppConfig(AppConfig):
    name = "corsheaders"
    verbose_name = "django-cors-headers"

    def ready(self) -> None:
        register(Tags.security)(check_settings)
178
srcs/.venv/lib/python3.11/site-packages/corsheaders/checks.py
Normal file
@@ -0,0 +1,178 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
from collections.abc import Sequence
|
||||||
|
from typing import Any
|
||||||
|
from urllib.parse import urlsplit
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
from django.core.checks import CheckMessage
|
||||||
|
from django.core.checks import Error
|
||||||
|
|
||||||
|
from corsheaders.conf import conf
|
||||||
|
|
||||||
|
re_type = type(re.compile(""))
|
||||||
|
|
||||||
|
|
||||||
|
def check_settings(**kwargs: Any) -> list[CheckMessage]:
|
||||||
|
errors: list[CheckMessage] = []
|
||||||
|
|
||||||
|
if not is_sequence(conf.CORS_ALLOW_HEADERS, str):
|
||||||
|
errors.append(
|
||||||
|
Error(
|
||||||
|
"CORS_ALLOW_HEADERS should be a sequence of strings.",
|
||||||
|
id="corsheaders.E001",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
if not is_sequence(conf.CORS_ALLOW_METHODS, str):
|
||||||
|
errors.append(
|
||||||
|
Error(
|
||||||
|
"CORS_ALLOW_METHODS should be a sequence of strings.",
|
||||||
|
id="corsheaders.E002",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
if not isinstance(conf.CORS_ALLOW_CREDENTIALS, bool):
|
||||||
|
errors.append( # type: ignore [unreachable]
|
||||||
|
Error("CORS_ALLOW_CREDENTIALS should be a bool.", id="corsheaders.E003")
|
||||||
|
)
|
||||||
|
|
||||||
|
if not isinstance(conf.CORS_ALLOW_PRIVATE_NETWORK, bool):
|
||||||
|
errors.append( # type: ignore [unreachable]
|
||||||
|
Error(
|
||||||
|
"CORS_ALLOW_PRIVATE_NETWORK should be a bool.",
|
||||||
|
id="corsheaders.E015",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
if (
|
||||||
|
not isinstance(conf.CORS_PREFLIGHT_MAX_AGE, int)
|
||||||
|
or conf.CORS_PREFLIGHT_MAX_AGE < 0
|
||||||
|
):
|
||||||
|
errors.append(
|
||||||
|
Error(
|
||||||
|
(
|
||||||
|
"CORS_PREFLIGHT_MAX_AGE should be an integer greater than "
|
||||||
|
+ "or equal to zero."
|
||||||
|
),
|
||||||
|
id="corsheaders.E004",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
if not isinstance(conf.CORS_ALLOW_ALL_ORIGINS, bool):
|
||||||
|
if hasattr(settings, "CORS_ALLOW_ALL_ORIGINS"): # type: ignore [unreachable]
|
||||||
|
allow_all_alias = "CORS_ALLOW_ALL_ORIGINS"
|
||||||
|
else:
|
||||||
|
allow_all_alias = "CORS_ORIGIN_ALLOW_ALL"
|
||||||
|
errors.append(
|
||||||
|
Error(
|
||||||
|
f"{allow_all_alias} should be a bool.",
|
||||||
|
id="corsheaders.E005",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
if hasattr(settings, "CORS_ALLOWED_ORIGINS"):
|
||||||
|
allowed_origins_alias = "CORS_ALLOWED_ORIGINS"
|
||||||
|
else:
|
||||||
|
allowed_origins_alias = "CORS_ORIGIN_WHITELIST"
|
||||||
|
|
||||||
|
if not is_sequence(conf.CORS_ALLOWED_ORIGINS, str):
|
||||||
|
errors.append(
|
||||||
|
Error(
|
||||||
|
f"{allowed_origins_alias} should be a sequence of strings.",
|
||||||
|
id="corsheaders.E006",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
special_origin_values = (
|
||||||
|
# From 'security sensitive' contexts
|
||||||
|
"null",
|
||||||
|
# From files on Chrome on Android
|
||||||
|
# https://bugs.chromium.org/p/chromium/issues/detail?id=991107
|
||||||
|
"file://",
|
||||||
|
)
|
||||||
|
for origin in conf.CORS_ALLOWED_ORIGINS:
|
||||||
|
if origin in special_origin_values:
|
||||||
|
continue
|
||||||
|
parsed = urlsplit(origin)
|
||||||
|
if parsed.scheme == "" or parsed.netloc == "":
|
||||||
|
errors.append(
|
||||||
|
Error(
|
||||||
|
"Origin {} in {} is missing scheme or netloc".format(
|
||||||
|
repr(origin), allowed_origins_alias
|
||||||
|
),
|
||||||
|
id="corsheaders.E013",
|
||||||
|
hint=(
|
||||||
|
"Add a scheme (e.g. https://) or netloc (e.g. "
|
||||||
|
+ "example.com)."
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
# Only do this check in this case because if the scheme is not
|
||||||
|
# provided, netloc ends up in path
|
||||||
|
for part in ("path", "query", "fragment"):
|
||||||
|
if getattr(parsed, part) != "":
|
||||||
|
errors.append(
|
||||||
|
Error(
|
||||||
|
"Origin {} in {} should not have {}".format(
|
||||||
|
repr(origin), allowed_origins_alias, part
|
||||||
|
),
|
||||||
|
id="corsheaders.E014",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
if hasattr(settings, "CORS_ALLOWED_ORIGIN_REGEXES"):
|
||||||
|
allowed_regexes_alias = "CORS_ALLOWED_ORIGIN_REGEXES"
|
||||||
|
else:
|
||||||
|
allowed_regexes_alias = "CORS_ORIGIN_REGEX_WHITELIST"
|
||||||
|
if not is_sequence(conf.CORS_ALLOWED_ORIGIN_REGEXES, (str, re_type)):
|
||||||
|
errors.append(
|
||||||
|
Error(
|
||||||
|
"{} should be a sequence of strings and/or compiled regexes.".format(
|
||||||
|
allowed_regexes_alias
|
||||||
|
),
|
||||||
|
id="corsheaders.E007",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
if not is_sequence(conf.CORS_EXPOSE_HEADERS, str):
|
||||||
|
errors.append(
|
||||||
|
Error("CORS_EXPOSE_HEADERS should be a sequence.", id="corsheaders.E008")
|
||||||
|
)
|
||||||
|
|
||||||
|
if not isinstance(conf.CORS_URLS_REGEX, (str, re_type)):
|
||||||
|
errors.append(
|
||||||
|
Error("CORS_URLS_REGEX should be a string or regex.", id="corsheaders.E009")
|
||||||
|
)
|
||||||
|
|
||||||
|
if hasattr(settings, "CORS_MODEL"):
|
||||||
|
errors.append(
|
||||||
|
Error(
|
||||||
|
(
|
||||||
|
"The CORS_MODEL setting has been removed - see "
|
||||||
|
+ "django-cors-headers' HISTORY."
|
||||||
|
),
|
||||||
|
id="corsheaders.E012",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
if hasattr(settings, "CORS_REPLACE_HTTPS_REFERER"):
|
||||||
|
errors.append(
|
||||||
|
Error(
|
||||||
|
(
|
||||||
|
"The CORS_REPLACE_HTTPS_REFERER setting has been removed"
|
||||||
|
+ " - see django-cors-headers' CHANGELOG."
|
||||||
|
),
|
||||||
|
id="corsheaders.E013",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
return errors
|
||||||
|
|
||||||
|
|
||||||
|
def is_sequence(thing: Any, type_or_types: type[Any] | tuple[type[Any], ...]) -> bool:
|
||||||
|
return isinstance(thing, Sequence) and all(
|
||||||
|
isinstance(x, type_or_types) for x in thing
|
||||||
|
)
|
75
srcs/.venv/lib/python3.11/site-packages/corsheaders/conf.py
Normal file
@@ -0,0 +1,75 @@
from __future__ import annotations

from typing import cast
from typing import List
from typing import Pattern
from typing import Sequence
from typing import Tuple
from typing import Union

from django.conf import settings

from corsheaders.defaults import default_headers
from corsheaders.defaults import default_methods


class Settings:
    """
    Shadow Django's settings with a little logic
    """

    @property
    def CORS_ALLOW_HEADERS(self) -> Sequence[str]:
        return getattr(settings, "CORS_ALLOW_HEADERS", default_headers)

    @property
    def CORS_ALLOW_METHODS(self) -> Sequence[str]:
        return getattr(settings, "CORS_ALLOW_METHODS", default_methods)

    @property
    def CORS_ALLOW_CREDENTIALS(self) -> bool:
        return getattr(settings, "CORS_ALLOW_CREDENTIALS", False)

    @property
    def CORS_ALLOW_PRIVATE_NETWORK(self) -> bool:
        return getattr(settings, "CORS_ALLOW_PRIVATE_NETWORK", False)

    @property
    def CORS_PREFLIGHT_MAX_AGE(self) -> int:
        return getattr(settings, "CORS_PREFLIGHT_MAX_AGE", 86400)

    @property
    def CORS_ALLOW_ALL_ORIGINS(self) -> bool:
        return getattr(
            settings,
            "CORS_ALLOW_ALL_ORIGINS",
            getattr(settings, "CORS_ORIGIN_ALLOW_ALL", False),
        )

    @property
    def CORS_ALLOWED_ORIGINS(self) -> list[str] | tuple[str]:
        value = getattr(
            settings,
            "CORS_ALLOWED_ORIGINS",
            getattr(settings, "CORS_ORIGIN_WHITELIST", ()),
        )
        return cast(Union[List[str], Tuple[str]], value)

    @property
    def CORS_ALLOWED_ORIGIN_REGEXES(self) -> Sequence[str | Pattern[str]]:
        return getattr(
            settings,
            "CORS_ALLOWED_ORIGIN_REGEXES",
            getattr(settings, "CORS_ORIGIN_REGEX_WHITELIST", ()),
        )

    @property
    def CORS_EXPOSE_HEADERS(self) -> Sequence[str]:
        return getattr(settings, "CORS_EXPOSE_HEADERS", ())

    @property
    def CORS_URLS_REGEX(self) -> str | Pattern[str]:
        return getattr(settings, "CORS_URLS_REGEX", r"^.*$")


conf = Settings()
@@ -0,0 +1,21 @@
from __future__ import annotations

# Kept here for backwards compatibility

default_headers = (
    "accept",
    "authorization",
    "content-type",
    "user-agent",
    "x-csrftoken",
    "x-requested-with",
)

default_methods = (
    "DELETE",
    "GET",
    "OPTIONS",
    "PATCH",
    "POST",
    "PUT",
)
@ -0,0 +1,169 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import re
|
||||||
|
from typing import Awaitable
|
||||||
|
from typing import Callable
|
||||||
|
from urllib.parse import SplitResult
|
||||||
|
from urllib.parse import urlsplit
|
||||||
|
|
||||||
|
from django.http import HttpRequest
|
||||||
|
from django.http import HttpResponse
|
||||||
|
from django.http.response import HttpResponseBase
|
||||||
|
from django.utils.cache import patch_vary_headers
|
||||||
|
|
||||||
|
from corsheaders.conf import conf
|
||||||
|
from corsheaders.signals import check_request_enabled
|
||||||
|
|
||||||
|
ACCESS_CONTROL_ALLOW_ORIGIN = "access-control-allow-origin"
|
||||||
|
ACCESS_CONTROL_EXPOSE_HEADERS = "access-control-expose-headers"
|
||||||
|
ACCESS_CONTROL_ALLOW_CREDENTIALS = "access-control-allow-credentials"
|
||||||
|
ACCESS_CONTROL_ALLOW_HEADERS = "access-control-allow-headers"
|
||||||
|
ACCESS_CONTROL_ALLOW_METHODS = "access-control-allow-methods"
|
||||||
|
ACCESS_CONTROL_MAX_AGE = "access-control-max-age"
|
||||||
|
ACCESS_CONTROL_REQUEST_PRIVATE_NETWORK = "access-control-request-private-network"
|
||||||
|
ACCESS_CONTROL_ALLOW_PRIVATE_NETWORK = "access-control-allow-private-network"
|
||||||
|
|
||||||
|
|
||||||
|
class CorsMiddleware:
|
||||||
|
sync_capable = True
|
||||||
|
async_capable = True
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
get_response: (
|
||||||
|
Callable[[HttpRequest], HttpResponseBase]
|
||||||
|
| Callable[[HttpRequest], Awaitable[HttpResponseBase]]
|
||||||
|
),
|
||||||
|
) -> None:
|
||||||
|
self.get_response = get_response
|
||||||
|
if asyncio.iscoroutinefunction(self.get_response):
|
||||||
|
# Mark the class as async-capable, but do the actual switch
|
||||||
|
# inside __call__ to avoid swapping out dunder methods
|
||||||
|
self._is_coroutine = (
|
||||||
|
asyncio.coroutines._is_coroutine # type: ignore [attr-defined]
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self._is_coroutine = None
|
||||||
|
|
||||||
|
def __call__(
|
||||||
|
self, request: HttpRequest
|
||||||
|
) -> HttpResponseBase | Awaitable[HttpResponseBase]:
|
||||||
|
if self._is_coroutine:
|
||||||
|
return self.__acall__(request)
|
||||||
|
response: HttpResponseBase | None = self.check_preflight(request)
|
||||||
|
if response is None:
|
||||||
|
result = self.get_response(request)
|
||||||
|
assert isinstance(result, HttpResponseBase)
|
||||||
|
response = result
|
||||||
|
self.add_response_headers(request, response)
|
||||||
|
return response
|
||||||
|
|
||||||
|
async def __acall__(self, request: HttpRequest) -> HttpResponseBase:
|
||||||
|
response = self.check_preflight(request)
|
||||||
|
if response is None:
|
||||||
|
result = self.get_response(request)
|
||||||
|
assert not isinstance(result, HttpResponseBase)
|
||||||
|
response = await result
|
||||||
|
self.add_response_headers(request, response)
|
||||||
|
return response
|
||||||
|
|
||||||
|
def check_preflight(self, request: HttpRequest) -> HttpResponseBase | None:
|
||||||
|
"""
|
||||||
|
Generate a response for CORS preflight requests.
|
||||||
|
"""
|
||||||
|
request._cors_enabled = self.is_enabled(request) # type: ignore [attr-defined]
|
||||||
|
if (
|
||||||
|
request._cors_enabled # type: ignore [attr-defined]
|
||||||
|
and request.method == "OPTIONS"
|
||||||
|
and "access-control-request-method" in request.headers
|
||||||
|
):
|
||||||
|
return HttpResponse(headers={"content-length": "0"})
|
||||||
|
return None
|
||||||
|
|
||||||
|
def add_response_headers(
|
||||||
|
self, request: HttpRequest, response: HttpResponseBase
|
||||||
|
) -> HttpResponseBase:
|
||||||
|
"""
|
||||||
|
Add the respective CORS headers
|
||||||
|
"""
|
||||||
|
enabled = getattr(request, "_cors_enabled", None)
|
||||||
|
if enabled is None:
|
||||||
|
enabled = self.is_enabled(request)
|
||||||
|
|
||||||
|
if not enabled:
|
||||||
|
return response
|
||||||
|
|
||||||
|
patch_vary_headers(response, ("origin",))
|
||||||
|
|
||||||
|
origin = request.headers.get("origin")
|
||||||
|
if not origin:
|
||||||
|
return response
|
||||||
|
|
||||||
|
try:
|
||||||
|
url = urlsplit(origin)
|
||||||
|
except ValueError:
|
||||||
|
return response
|
||||||
|
|
||||||
|
if (
|
||||||
|
not conf.CORS_ALLOW_ALL_ORIGINS
|
||||||
|
and not self.origin_found_in_white_lists(origin, url)
|
||||||
|
and not self.check_signal(request)
|
||||||
|
):
|
||||||
|
return response
|
||||||
|
|
||||||
|
if conf.CORS_ALLOW_ALL_ORIGINS and not conf.CORS_ALLOW_CREDENTIALS:
|
||||||
|
response[ACCESS_CONTROL_ALLOW_ORIGIN] = "*"
|
||||||
|
else:
|
||||||
|
response[ACCESS_CONTROL_ALLOW_ORIGIN] = origin
|
||||||
|
|
||||||
|
if conf.CORS_ALLOW_CREDENTIALS:
|
||||||
|
response[ACCESS_CONTROL_ALLOW_CREDENTIALS] = "true"
|
||||||
|
|
||||||
|
if len(conf.CORS_EXPOSE_HEADERS):
|
||||||
|
response[ACCESS_CONTROL_EXPOSE_HEADERS] = ", ".join(
|
||||||
|
conf.CORS_EXPOSE_HEADERS
|
||||||
|
)
|
||||||
|
|
||||||
|
if request.method == "OPTIONS":
|
||||||
|
response[ACCESS_CONTROL_ALLOW_HEADERS] = ", ".join(conf.CORS_ALLOW_HEADERS)
|
||||||
|
response[ACCESS_CONTROL_ALLOW_METHODS] = ", ".join(conf.CORS_ALLOW_METHODS)
|
||||||
|
if conf.CORS_PREFLIGHT_MAX_AGE:
|
||||||
|
response[ACCESS_CONTROL_MAX_AGE] = str(conf.CORS_PREFLIGHT_MAX_AGE)
|
||||||
|
|
||||||
|
if (
|
||||||
|
conf.CORS_ALLOW_PRIVATE_NETWORK
|
||||||
|
and request.headers.get(ACCESS_CONTROL_REQUEST_PRIVATE_NETWORK) == "true"
|
||||||
|
):
|
||||||
|
response[ACCESS_CONTROL_ALLOW_PRIVATE_NETWORK] = "true"
|
||||||
|
|
||||||
|
return response
|
||||||
|
|
||||||
|
def origin_found_in_white_lists(self, origin: str, url: SplitResult) -> bool:
|
||||||
|
return (
|
||||||
|
(origin == "null" and origin in conf.CORS_ALLOWED_ORIGINS)
|
||||||
|
or self._url_in_whitelist(url)
|
||||||
|
or self.regex_domain_match(origin)
|
||||||
|
)
|
||||||
|
|
||||||
|
def regex_domain_match(self, origin: str) -> bool:
|
||||||
|
return any(
|
||||||
|
re.match(domain_pattern, origin)
|
||||||
|
for domain_pattern in conf.CORS_ALLOWED_ORIGIN_REGEXES
|
||||||
|
)
|
||||||
|
|
||||||
|
def is_enabled(self, request: HttpRequest) -> bool:
|
||||||
|
return bool(
|
||||||
|
re.match(conf.CORS_URLS_REGEX, request.path_info)
|
||||||
|
) or self.check_signal(request)
|
||||||
|
|
||||||
|
def check_signal(self, request: HttpRequest) -> bool:
|
||||||
|
signal_responses = check_request_enabled.send(sender=None, request=request)
|
||||||
|
return any(return_value for function, return_value in signal_responses)
|
||||||
|
|
||||||
|
def _url_in_whitelist(self, url: SplitResult) -> bool:
|
||||||
|
origins = [urlsplit(o) for o in conf.CORS_ALLOWED_ORIGINS]
|
||||||
|
return any(
|
||||||
|
origin.scheme == url.scheme and origin.netloc == url.netloc
|
||||||
|
for origin in origins
|
||||||
|
)
|
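
Usage sketch (not part of the committed files): the Django settings the CorsMiddleware above reads through corsheaders.conf. The values are illustrative examples, not taken from this repository's configuration.

# Illustrative settings.py fragment for django-cors-headers.
INSTALLED_APPS = [
    # ...
    "corsheaders",
]

MIDDLEWARE = [
    "corsheaders.middleware.CorsMiddleware",  # typically placed above CommonMiddleware
    "django.middleware.common.CommonMiddleware",
    # ...
]

CORS_ALLOWED_ORIGINS = ["https://localhost:8000"]  # example origin
CORS_ALLOW_CREDENTIALS = True
CORS_URLS_REGEX = r"^/api/.*$"  # only add CORS headers on API paths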
@@ -0,0 +1,8 @@
from __future__ import annotations

from django.dispatch import Signal

# If any attached handler returns Truthy, CORS will be allowed for the request.
# This can be used to build custom logic into the request handling when the
# configuration doesn't work.
check_request_enabled = Signal()
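
Usage sketch (not part of the committed file): connecting a receiver to the check_request_enabled signal above so CORS is allowed for one hypothetical path regardless of the origin whitelist.

from corsheaders.signals import check_request_enabled


def cors_allow_public_api(sender, request, **kwargs):
    # Return True to enable CORS for this request; path is illustrative.
    return request.path.startswith("/api/public/")


check_request_enabled.connect(cors_allow_public_api)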
@@ -0,0 +1 @@
import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'local') == 'local'; enabled and __import__('_distutils_hack').add_shim();
24
srcs/.venv/lib/python3.11/site-packages/django/__init__.py
Normal file
@@ -0,0 +1,24 @@
from django.utils.version import get_version

VERSION = (4, 2, 6, "final", 0)

__version__ = get_version(VERSION)


def setup(set_prefix=True):
    """
    Configure the settings (this happens as a side effect of accessing the
    first setting), configure logging and populate the app registry.
    Set the thread-local urlresolvers script prefix if `set_prefix` is True.
    """
    from django.apps import apps
    from django.conf import settings
    from django.urls import set_script_prefix
    from django.utils.log import configure_logging

    configure_logging(settings.LOGGING_CONFIG, settings.LOGGING)
    if set_prefix:
        set_script_prefix(
            "/" if settings.FORCE_SCRIPT_NAME is None else settings.FORCE_SCRIPT_NAME
        )
    apps.populate(settings.INSTALLED_APPS)
@@ -0,0 +1,9 @@
"""
Invokes django-admin when the django module is run as a script.

Example: python -m django check
"""
from django.core import management

if __name__ == "__main__":
    management.execute_from_command_line()
@@ -0,0 +1,4 @@
from .config import AppConfig
from .registry import apps

__all__ = ["AppConfig", "apps"]
274
srcs/.venv/lib/python3.11/site-packages/django/apps/config.py
Normal file
@@ -0,0 +1,274 @@
|
|||||||
|
import inspect
|
||||||
|
import os
|
||||||
|
from importlib import import_module
|
||||||
|
|
||||||
|
from django.core.exceptions import ImproperlyConfigured
|
||||||
|
from django.utils.functional import cached_property
|
||||||
|
from django.utils.module_loading import import_string, module_has_submodule
|
||||||
|
|
||||||
|
APPS_MODULE_NAME = "apps"
|
||||||
|
MODELS_MODULE_NAME = "models"
|
||||||
|
|
||||||
|
|
||||||
|
class AppConfig:
|
||||||
|
"""Class representing a Django application and its configuration."""
|
||||||
|
|
||||||
|
def __init__(self, app_name, app_module):
|
||||||
|
# Full Python path to the application e.g. 'django.contrib.admin'.
|
||||||
|
self.name = app_name
|
||||||
|
|
||||||
|
# Root module for the application e.g. <module 'django.contrib.admin'
|
||||||
|
# from 'django/contrib/admin/__init__.py'>.
|
||||||
|
self.module = app_module
|
||||||
|
|
||||||
|
# Reference to the Apps registry that holds this AppConfig. Set by the
|
||||||
|
# registry when it registers the AppConfig instance.
|
||||||
|
self.apps = None
|
||||||
|
|
||||||
|
# The following attributes could be defined at the class level in a
|
||||||
|
# subclass, hence the test-and-set pattern.
|
||||||
|
|
||||||
|
# Last component of the Python path to the application e.g. 'admin'.
|
||||||
|
# This value must be unique across a Django project.
|
||||||
|
if not hasattr(self, "label"):
|
||||||
|
self.label = app_name.rpartition(".")[2]
|
||||||
|
if not self.label.isidentifier():
|
||||||
|
raise ImproperlyConfigured(
|
||||||
|
"The app label '%s' is not a valid Python identifier." % self.label
|
||||||
|
)
|
||||||
|
|
||||||
|
# Human-readable name for the application e.g. "Admin".
|
||||||
|
if not hasattr(self, "verbose_name"):
|
||||||
|
self.verbose_name = self.label.title()
|
||||||
|
|
||||||
|
# Filesystem path to the application directory e.g.
|
||||||
|
# '/path/to/django/contrib/admin'.
|
||||||
|
if not hasattr(self, "path"):
|
||||||
|
self.path = self._path_from_module(app_module)
|
||||||
|
|
||||||
|
# Module containing models e.g. <module 'django.contrib.admin.models'
|
||||||
|
# from 'django/contrib/admin/models.py'>. Set by import_models().
|
||||||
|
# None if the application doesn't have a models module.
|
||||||
|
self.models_module = None
|
||||||
|
|
||||||
|
# Mapping of lowercase model names to model classes. Initially set to
|
||||||
|
# None to prevent accidental access before import_models() runs.
|
||||||
|
self.models = None
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "<%s: %s>" % (self.__class__.__name__, self.label)
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def default_auto_field(self):
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
return settings.DEFAULT_AUTO_FIELD
|
||||||
|
|
||||||
|
@property
|
||||||
|
def _is_default_auto_field_overridden(self):
|
||||||
|
return self.__class__.default_auto_field is not AppConfig.default_auto_field
|
||||||
|
|
||||||
|
def _path_from_module(self, module):
|
||||||
|
"""Attempt to determine app's filesystem path from its module."""
|
||||||
|
# See #21874 for extended discussion of the behavior of this method in
|
||||||
|
# various cases.
|
||||||
|
# Convert to list because __path__ may not support indexing.
|
||||||
|
paths = list(getattr(module, "__path__", []))
|
||||||
|
if len(paths) != 1:
|
||||||
|
filename = getattr(module, "__file__", None)
|
||||||
|
if filename is not None:
|
||||||
|
paths = [os.path.dirname(filename)]
|
||||||
|
else:
|
||||||
|
# For unknown reasons, sometimes the list returned by __path__
|
||||||
|
# contains duplicates that must be removed (#25246).
|
||||||
|
paths = list(set(paths))
|
||||||
|
if len(paths) > 1:
|
||||||
|
raise ImproperlyConfigured(
|
||||||
|
"The app module %r has multiple filesystem locations (%r); "
|
||||||
|
"you must configure this app with an AppConfig subclass "
|
||||||
|
"with a 'path' class attribute." % (module, paths)
|
||||||
|
)
|
||||||
|
elif not paths:
|
||||||
|
raise ImproperlyConfigured(
|
||||||
|
"The app module %r has no filesystem location, "
|
||||||
|
"you must configure this app with an AppConfig subclass "
|
||||||
|
"with a 'path' class attribute." % module
|
||||||
|
)
|
||||||
|
return paths[0]
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def create(cls, entry):
|
||||||
|
"""
|
||||||
|
Factory that creates an app config from an entry in INSTALLED_APPS.
|
||||||
|
"""
|
||||||
|
# create() eventually returns app_config_class(app_name, app_module).
|
||||||
|
app_config_class = None
|
||||||
|
app_name = None
|
||||||
|
app_module = None
|
||||||
|
|
||||||
|
# If import_module succeeds, entry points to the app module.
|
||||||
|
try:
|
||||||
|
app_module = import_module(entry)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
# If app_module has an apps submodule that defines a single
|
||||||
|
# AppConfig subclass, use it automatically.
|
||||||
|
# To prevent this, an AppConfig subclass can declare a class
|
||||||
|
# variable default = False.
|
||||||
|
# If the apps module defines more than one AppConfig subclass,
|
||||||
|
# the default one can declare default = True.
|
||||||
|
if module_has_submodule(app_module, APPS_MODULE_NAME):
|
||||||
|
mod_path = "%s.%s" % (entry, APPS_MODULE_NAME)
|
||||||
|
mod = import_module(mod_path)
|
||||||
|
# Check if there's exactly one AppConfig candidate,
|
||||||
|
# excluding those that explicitly define default = False.
|
||||||
|
app_configs = [
|
||||||
|
(name, candidate)
|
||||||
|
for name, candidate in inspect.getmembers(mod, inspect.isclass)
|
||||||
|
if (
|
||||||
|
issubclass(candidate, cls)
|
||||||
|
and candidate is not cls
|
||||||
|
and getattr(candidate, "default", True)
|
||||||
|
)
|
||||||
|
]
|
||||||
|
if len(app_configs) == 1:
|
||||||
|
app_config_class = app_configs[0][1]
|
||||||
|
else:
|
||||||
|
# Check if there's exactly one AppConfig subclass,
|
||||||
|
# among those that explicitly define default = True.
|
||||||
|
app_configs = [
|
||||||
|
(name, candidate)
|
||||||
|
for name, candidate in app_configs
|
||||||
|
if getattr(candidate, "default", False)
|
||||||
|
]
|
||||||
|
if len(app_configs) > 1:
|
||||||
|
candidates = [repr(name) for name, _ in app_configs]
|
||||||
|
raise RuntimeError(
|
||||||
|
"%r declares more than one default AppConfig: "
|
||||||
|
"%s." % (mod_path, ", ".join(candidates))
|
||||||
|
)
|
||||||
|
elif len(app_configs) == 1:
|
||||||
|
app_config_class = app_configs[0][1]
|
||||||
|
|
||||||
|
# Use the default app config class if we didn't find anything.
|
||||||
|
if app_config_class is None:
|
||||||
|
app_config_class = cls
|
||||||
|
app_name = entry
|
||||||
|
|
||||||
|
# If import_string succeeds, entry is an app config class.
|
||||||
|
if app_config_class is None:
|
||||||
|
try:
|
||||||
|
app_config_class = import_string(entry)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
# If both import_module and import_string failed, it means that entry
|
||||||
|
# doesn't have a valid value.
|
||||||
|
if app_module is None and app_config_class is None:
|
||||||
|
# If the last component of entry starts with an uppercase letter,
|
||||||
|
# then it was likely intended to be an app config class; if not,
|
||||||
|
# an app module. Provide a nice error message in both cases.
|
||||||
|
mod_path, _, cls_name = entry.rpartition(".")
|
||||||
|
if mod_path and cls_name[0].isupper():
|
||||||
|
# We could simply re-trigger the string import exception, but
|
||||||
|
# we're going the extra mile and providing a better error
|
||||||
|
# message for typos in INSTALLED_APPS.
|
||||||
|
# This may raise ImportError, which is the best exception
|
||||||
|
# possible if the module at mod_path cannot be imported.
|
||||||
|
mod = import_module(mod_path)
|
||||||
|
candidates = [
|
||||||
|
repr(name)
|
||||||
|
for name, candidate in inspect.getmembers(mod, inspect.isclass)
|
||||||
|
if issubclass(candidate, cls) and candidate is not cls
|
||||||
|
]
|
||||||
|
msg = "Module '%s' does not contain a '%s' class." % (
|
||||||
|
mod_path,
|
||||||
|
cls_name,
|
||||||
|
)
|
||||||
|
if candidates:
|
||||||
|
msg += " Choices are: %s." % ", ".join(candidates)
|
||||||
|
raise ImportError(msg)
|
||||||
|
else:
|
||||||
|
# Re-trigger the module import exception.
|
||||||
|
import_module(entry)
|
||||||
|
|
||||||
|
# Check for obvious errors. (This check prevents duck typing, but
|
||||||
|
# it could be removed if it became a problem in practice.)
|
||||||
|
if not issubclass(app_config_class, AppConfig):
|
||||||
|
raise ImproperlyConfigured("'%s' isn't a subclass of AppConfig." % entry)
|
||||||
|
|
||||||
|
# Obtain app name here rather than in AppClass.__init__ to keep
|
||||||
|
# all error checking for entries in INSTALLED_APPS in one place.
|
||||||
|
if app_name is None:
|
||||||
|
try:
|
||||||
|
app_name = app_config_class.name
|
||||||
|
except AttributeError:
|
||||||
|
raise ImproperlyConfigured("'%s' must supply a name attribute." % entry)
|
||||||
|
|
||||||
|
# Ensure app_name points to a valid module.
|
||||||
|
try:
|
||||||
|
app_module = import_module(app_name)
|
||||||
|
except ImportError:
|
||||||
|
raise ImproperlyConfigured(
|
||||||
|
"Cannot import '%s'. Check that '%s.%s.name' is correct."
|
||||||
|
% (
|
||||||
|
app_name,
|
||||||
|
app_config_class.__module__,
|
||||||
|
app_config_class.__qualname__,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Entry is a path to an app config class.
|
||||||
|
return app_config_class(app_name, app_module)
|
||||||
|
|
||||||
|
def get_model(self, model_name, require_ready=True):
|
||||||
|
"""
|
||||||
|
Return the model with the given case-insensitive model_name.
|
||||||
|
|
||||||
|
Raise LookupError if no model exists with this name.
|
||||||
|
"""
|
||||||
|
if require_ready:
|
||||||
|
self.apps.check_models_ready()
|
||||||
|
else:
|
||||||
|
self.apps.check_apps_ready()
|
||||||
|
try:
|
||||||
|
return self.models[model_name.lower()]
|
||||||
|
except KeyError:
|
||||||
|
raise LookupError(
|
||||||
|
"App '%s' doesn't have a '%s' model." % (self.label, model_name)
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_models(self, include_auto_created=False, include_swapped=False):
|
||||||
|
"""
|
||||||
|
Return an iterable of models.
|
||||||
|
|
||||||
|
By default, the following models aren't included:
|
||||||
|
|
||||||
|
- auto-created models for many-to-many relations without
|
||||||
|
an explicit intermediate table,
|
||||||
|
- models that have been swapped out.
|
||||||
|
|
||||||
|
Set the corresponding keyword argument to True to include such models.
|
||||||
|
Keyword arguments aren't documented; they're a private API.
|
||||||
|
"""
|
||||||
|
self.apps.check_models_ready()
|
||||||
|
for model in self.models.values():
|
||||||
|
if model._meta.auto_created and not include_auto_created:
|
||||||
|
continue
|
||||||
|
if model._meta.swapped and not include_swapped:
|
||||||
|
continue
|
||||||
|
yield model
|
||||||
|
|
||||||
|
def import_models(self):
|
||||||
|
# Dictionary of models for this app, primarily maintained in the
|
||||||
|
# 'all_models' attribute of the Apps this AppConfig is attached to.
|
||||||
|
self.models = self.apps.all_models[self.label]
|
||||||
|
|
||||||
|
if module_has_submodule(self.module, MODELS_MODULE_NAME):
|
||||||
|
models_module_name = "%s.%s" % (self.name, MODELS_MODULE_NAME)
|
||||||
|
self.models_module = import_module(models_module_name)
|
||||||
|
|
||||||
|
def ready(self):
|
||||||
|
"""
|
||||||
|
Override this method in subclasses to run code when Django starts.
|
||||||
|
"""
|
436
srcs/.venv/lib/python3.11/site-packages/django/apps/registry.py
Normal file
@@ -0,0 +1,436 @@
import functools
import sys
import threading
import warnings
from collections import Counter, defaultdict
from functools import partial

from django.core.exceptions import AppRegistryNotReady, ImproperlyConfigured

from .config import AppConfig


class Apps:
    """
    A registry that stores the configuration of installed applications.

    It also keeps track of models, e.g. to provide reverse relations.
    """

    def __init__(self, installed_apps=()):
        # installed_apps is set to None when creating the main registry
        # because it cannot be populated at that point. Other registries must
        # provide a list of installed apps and are populated immediately.
        if installed_apps is None and hasattr(sys.modules[__name__], "apps"):
            raise RuntimeError("You must supply an installed_apps argument.")

        # Mapping of app labels => model names => model classes. Every time a
        # model is imported, ModelBase.__new__ calls apps.register_model which
        # creates an entry in all_models. All imported models are registered,
        # regardless of whether they're defined in an installed application
        # and whether the registry has been populated. Since it isn't possible
        # to reimport a module safely (it could reexecute initialization code)
        # all_models is never overridden or reset.
        self.all_models = defaultdict(dict)

        # Mapping of labels to AppConfig instances for installed apps.
        self.app_configs = {}

        # Stack of app_configs. Used to store the current state in
        # set_available_apps and set_installed_apps.
        self.stored_app_configs = []

        # Whether the registry is populated.
        self.apps_ready = self.models_ready = self.ready = False
        # For the autoreloader.
        self.ready_event = threading.Event()

        # Lock for thread-safe population.
        self._lock = threading.RLock()
        self.loading = False

        # Maps ("app_label", "modelname") tuples to lists of functions to be
        # called when the corresponding model is ready. Used by this class's
        # `lazy_model_operation()` and `do_pending_operations()` methods.
        self._pending_operations = defaultdict(list)

        # Populate apps and models, unless it's the main registry.
        if installed_apps is not None:
            self.populate(installed_apps)

    def populate(self, installed_apps=None):
        """
        Load application configurations and models.

        Import each application module and then each model module.

        It is thread-safe and idempotent, but not reentrant.
        """
        if self.ready:
            return

        # populate() might be called by two threads in parallel on servers
        # that create threads before initializing the WSGI callable.
        with self._lock:
            if self.ready:
                return

            # An RLock prevents other threads from entering this section. The
            # compare and set operation below is atomic.
            if self.loading:
                # Prevent reentrant calls to avoid running AppConfig.ready()
                # methods twice.
                raise RuntimeError("populate() isn't reentrant")
            self.loading = True

            # Phase 1: initialize app configs and import app modules.
            for entry in installed_apps:
                if isinstance(entry, AppConfig):
                    app_config = entry
                else:
                    app_config = AppConfig.create(entry)
                if app_config.label in self.app_configs:
                    raise ImproperlyConfigured(
                        "Application labels aren't unique, "
                        "duplicates: %s" % app_config.label
                    )

                self.app_configs[app_config.label] = app_config
                app_config.apps = self

            # Check for duplicate app names.
            counts = Counter(
                app_config.name for app_config in self.app_configs.values()
            )
            duplicates = [name for name, count in counts.most_common() if count > 1]
            if duplicates:
                raise ImproperlyConfigured(
                    "Application names aren't unique, "
                    "duplicates: %s" % ", ".join(duplicates)
                )

            self.apps_ready = True

            # Phase 2: import models modules.
            for app_config in self.app_configs.values():
                app_config.import_models()

            self.clear_cache()

            self.models_ready = True

            # Phase 3: run ready() methods of app configs.
            for app_config in self.get_app_configs():
                app_config.ready()

            self.ready = True
            self.ready_event.set()

def check_apps_ready(self):
|
||||||
|
"""Raise an exception if all apps haven't been imported yet."""
|
||||||
|
if not self.apps_ready:
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
# If "not ready" is due to unconfigured settings, accessing
|
||||||
|
# INSTALLED_APPS raises a more helpful ImproperlyConfigured
|
||||||
|
# exception.
|
||||||
|
settings.INSTALLED_APPS
|
||||||
|
raise AppRegistryNotReady("Apps aren't loaded yet.")
|
||||||
|
|
||||||
|
def check_models_ready(self):
|
||||||
|
"""Raise an exception if all models haven't been imported yet."""
|
||||||
|
if not self.models_ready:
|
||||||
|
raise AppRegistryNotReady("Models aren't loaded yet.")
|
||||||
|
|
||||||
|
def get_app_configs(self):
|
||||||
|
"""Import applications and return an iterable of app configs."""
|
||||||
|
self.check_apps_ready()
|
||||||
|
return self.app_configs.values()
|
||||||
|
|
||||||
|
def get_app_config(self, app_label):
|
||||||
|
"""
|
||||||
|
Import applications and returns an app config for the given label.
|
||||||
|
|
||||||
|
Raise LookupError if no application exists with this label.
|
||||||
|
"""
|
||||||
|
self.check_apps_ready()
|
||||||
|
try:
|
||||||
|
return self.app_configs[app_label]
|
||||||
|
except KeyError:
|
||||||
|
message = "No installed app with label '%s'." % app_label
|
||||||
|
for app_config in self.get_app_configs():
|
||||||
|
if app_config.name == app_label:
|
||||||
|
message += " Did you mean '%s'?" % app_config.label
|
||||||
|
break
|
||||||
|
raise LookupError(message)
|
||||||
|
|
||||||
|
# This method is performance-critical at least for Django's test suite.
|
||||||
|
@functools.lru_cache(maxsize=None)
|
||||||
|
def get_models(self, include_auto_created=False, include_swapped=False):
|
||||||
|
"""
|
||||||
|
Return a list of all installed models.
|
||||||
|
|
||||||
|
By default, the following models aren't included:
|
||||||
|
|
||||||
|
- auto-created models for many-to-many relations without
|
||||||
|
an explicit intermediate table,
|
||||||
|
- models that have been swapped out.
|
||||||
|
|
||||||
|
Set the corresponding keyword argument to True to include such models.
|
||||||
|
"""
|
||||||
|
self.check_models_ready()
|
||||||
|
|
||||||
|
result = []
|
||||||
|
for app_config in self.app_configs.values():
|
||||||
|
result.extend(app_config.get_models(include_auto_created, include_swapped))
|
||||||
|
return result
|
||||||
|
|
||||||
|
def get_model(self, app_label, model_name=None, require_ready=True):
|
||||||
|
"""
|
||||||
|
Return the model matching the given app_label and model_name.
|
||||||
|
|
||||||
|
As a shortcut, app_label may be in the form <app_label>.<model_name>.
|
||||||
|
|
||||||
|
model_name is case-insensitive.
|
||||||
|
|
||||||
|
Raise LookupError if no application exists with this label, or no
|
||||||
|
model exists with this name in the application. Raise ValueError if
|
||||||
|
called with a single argument that doesn't contain exactly one dot.
|
||||||
|
"""
|
||||||
|
if require_ready:
|
||||||
|
self.check_models_ready()
|
||||||
|
else:
|
||||||
|
self.check_apps_ready()
|
||||||
|
|
||||||
|
if model_name is None:
|
||||||
|
app_label, model_name = app_label.split(".")
|
||||||
|
|
||||||
|
app_config = self.get_app_config(app_label)
|
||||||
|
|
||||||
|
if not require_ready and app_config.models is None:
|
||||||
|
app_config.import_models()
|
||||||
|
|
||||||
|
return app_config.get_model(model_name, require_ready=require_ready)
|
||||||
|
|
||||||
|
def register_model(self, app_label, model):
|
||||||
|
# Since this method is called when models are imported, it cannot
|
||||||
|
# perform imports because of the risk of import loops. It mustn't
|
||||||
|
# call get_app_config().
|
||||||
|
model_name = model._meta.model_name
|
||||||
|
app_models = self.all_models[app_label]
|
||||||
|
if model_name in app_models:
|
||||||
|
if (
|
||||||
|
model.__name__ == app_models[model_name].__name__
|
||||||
|
and model.__module__ == app_models[model_name].__module__
|
||||||
|
):
|
||||||
|
warnings.warn(
|
||||||
|
"Model '%s.%s' was already registered. Reloading models is not "
|
||||||
|
"advised as it can lead to inconsistencies, most notably with "
|
||||||
|
"related models." % (app_label, model_name),
|
||||||
|
RuntimeWarning,
|
||||||
|
stacklevel=2,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
raise RuntimeError(
|
||||||
|
"Conflicting '%s' models in application '%s': %s and %s."
|
||||||
|
% (model_name, app_label, app_models[model_name], model)
|
||||||
|
)
|
||||||
|
app_models[model_name] = model
|
||||||
|
self.do_pending_operations(model)
|
||||||
|
self.clear_cache()
|
||||||
|
|
||||||
|
def is_installed(self, app_name):
|
||||||
|
"""
|
||||||
|
Check whether an application with this name exists in the registry.
|
||||||
|
|
||||||
|
app_name is the full name of the app e.g. 'django.contrib.admin'.
|
||||||
|
"""
|
||||||
|
self.check_apps_ready()
|
||||||
|
return any(ac.name == app_name for ac in self.app_configs.values())
|
||||||
|
|
||||||
|
def get_containing_app_config(self, object_name):
|
||||||
|
"""
|
||||||
|
Look for an app config containing a given object.
|
||||||
|
|
||||||
|
object_name is the dotted Python path to the object.
|
||||||
|
|
||||||
|
Return the app config for the inner application in case of nesting.
|
||||||
|
Return None if the object isn't in any registered app config.
|
||||||
|
"""
|
||||||
|
self.check_apps_ready()
|
||||||
|
candidates = []
|
||||||
|
for app_config in self.app_configs.values():
|
||||||
|
if object_name.startswith(app_config.name):
|
||||||
|
subpath = object_name[len(app_config.name) :]
|
||||||
|
if subpath == "" or subpath[0] == ".":
|
||||||
|
candidates.append(app_config)
|
||||||
|
if candidates:
|
||||||
|
return sorted(candidates, key=lambda ac: -len(ac.name))[0]
|
||||||
|
|
||||||
|
def get_registered_model(self, app_label, model_name):
|
||||||
|
"""
|
||||||
|
Similar to get_model(), but doesn't require that an app exists with
|
||||||
|
the given app_label.
|
||||||
|
|
||||||
|
It's safe to call this method at import time, even while the registry
|
||||||
|
is being populated.
|
||||||
|
"""
|
||||||
|
model = self.all_models[app_label].get(model_name.lower())
|
||||||
|
if model is None:
|
||||||
|
raise LookupError("Model '%s.%s' not registered." % (app_label, model_name))
|
||||||
|
return model
|
||||||
|
|
||||||
|
@functools.lru_cache(maxsize=None)
|
||||||
|
def get_swappable_settings_name(self, to_string):
|
||||||
|
"""
|
||||||
|
For a given model string (e.g. "auth.User"), return the name of the
|
||||||
|
corresponding settings name if it refers to a swappable model. If the
|
||||||
|
referred model is not swappable, return None.
|
||||||
|
|
||||||
|
This method is decorated with lru_cache because it's performance
|
||||||
|
critical when it comes to migrations. Since the swappable settings don't
|
||||||
|
change after Django has loaded the settings, there is no reason to get
|
||||||
|
the respective settings attribute over and over again.
|
||||||
|
"""
|
||||||
|
to_string = to_string.lower()
|
||||||
|
for model in self.get_models(include_swapped=True):
|
||||||
|
swapped = model._meta.swapped
|
||||||
|
# Is this model swapped out for the model given by to_string?
|
||||||
|
if swapped and swapped.lower() == to_string:
|
||||||
|
return model._meta.swappable
|
||||||
|
# Is this model swappable and the one given by to_string?
|
||||||
|
if model._meta.swappable and model._meta.label_lower == to_string:
|
||||||
|
return model._meta.swappable
|
||||||
|
return None
|
||||||
|
|
||||||
|
def set_available_apps(self, available):
|
||||||
|
"""
|
||||||
|
Restrict the set of installed apps used by get_app_config[s].
|
||||||
|
|
||||||
|
available must be an iterable of application names.
|
||||||
|
|
||||||
|
set_available_apps() must be balanced with unset_available_apps().
|
||||||
|
|
||||||
|
Primarily used for performance optimization in TransactionTestCase.
|
||||||
|
|
||||||
|
This method is safe in the sense that it doesn't trigger any imports.
|
||||||
|
"""
|
||||||
|
available = set(available)
|
||||||
|
installed = {app_config.name for app_config in self.get_app_configs()}
|
||||||
|
if not available.issubset(installed):
|
||||||
|
raise ValueError(
|
||||||
|
"Available apps isn't a subset of installed apps, extra apps: %s"
|
||||||
|
% ", ".join(available - installed)
|
||||||
|
)
|
||||||
|
|
||||||
|
self.stored_app_configs.append(self.app_configs)
|
||||||
|
self.app_configs = {
|
||||||
|
label: app_config
|
||||||
|
for label, app_config in self.app_configs.items()
|
||||||
|
if app_config.name in available
|
||||||
|
}
|
||||||
|
self.clear_cache()
|
||||||
|
|
||||||
|
def unset_available_apps(self):
|
||||||
|
"""Cancel a previous call to set_available_apps()."""
|
||||||
|
self.app_configs = self.stored_app_configs.pop()
|
||||||
|
self.clear_cache()
|
||||||
|
|
||||||
|
def set_installed_apps(self, installed):
|
||||||
|
"""
|
||||||
|
Enable a different set of installed apps for get_app_config[s].
|
||||||
|
|
||||||
|
installed must be an iterable in the same format as INSTALLED_APPS.
|
||||||
|
|
||||||
|
set_installed_apps() must be balanced with unset_installed_apps(),
|
||||||
|
even if it exits with an exception.
|
||||||
|
|
||||||
|
Primarily used as a receiver of the setting_changed signal in tests.
|
||||||
|
|
||||||
|
This method may trigger new imports, which may add new models to the
|
||||||
|
registry of all imported models. They will stay in the registry even
|
||||||
|
after unset_installed_apps(). Since it isn't possible to replay
|
||||||
|
imports safely (e.g. that could lead to registering listeners twice),
|
||||||
|
models are registered when they're imported and never removed.
|
||||||
|
"""
|
||||||
|
if not self.ready:
|
||||||
|
raise AppRegistryNotReady("App registry isn't ready yet.")
|
||||||
|
self.stored_app_configs.append(self.app_configs)
|
||||||
|
self.app_configs = {}
|
||||||
|
self.apps_ready = self.models_ready = self.loading = self.ready = False
|
||||||
|
self.clear_cache()
|
||||||
|
self.populate(installed)
|
||||||
|
|
||||||
|
def unset_installed_apps(self):
|
||||||
|
"""Cancel a previous call to set_installed_apps()."""
|
||||||
|
self.app_configs = self.stored_app_configs.pop()
|
||||||
|
self.apps_ready = self.models_ready = self.ready = True
|
||||||
|
self.clear_cache()
|
||||||
|
|
||||||
|
def clear_cache(self):
|
||||||
|
"""
|
||||||
|
Clear all internal caches, for methods that alter the app registry.
|
||||||
|
|
||||||
|
This is mostly used in tests.
|
||||||
|
"""
|
||||||
|
# Call expire cache on each model. This will purge
|
||||||
|
# the relation tree and the fields cache.
|
||||||
|
self.get_models.cache_clear()
|
||||||
|
if self.ready:
|
||||||
|
# Circumvent self.get_models() to prevent that the cache is refilled.
|
||||||
|
# This particularly prevents that an empty value is cached while cloning.
|
||||||
|
for app_config in self.app_configs.values():
|
||||||
|
for model in app_config.get_models(include_auto_created=True):
|
||||||
|
model._meta._expire_cache()
|
||||||
|
|
||||||
|
    def lazy_model_operation(self, function, *model_keys):
        """
        Take a function and a number of ("app_label", "modelname") tuples, and
        when all the corresponding models have been imported and registered,
        call the function with the model classes as its arguments.

        The function passed to this method must accept exactly n models as
        arguments, where n=len(model_keys).
        """
        # Base case: no arguments, just execute the function.
        if not model_keys:
            function()
        # Recursive case: take the head of model_keys, wait for the
        # corresponding model class to be imported and registered, then apply
        # that argument to the supplied function. Pass the resulting partial
        # to lazy_model_operation() along with the remaining model args and
        # repeat until all models are loaded and all arguments are applied.
        else:
            next_model, *more_models = model_keys

            # This will be executed after the class corresponding to next_model
            # has been imported and registered. The `func` attribute provides
            # duck-type compatibility with partials.
            def apply_next_model(model):
                next_function = partial(apply_next_model.func, model)
                self.lazy_model_operation(next_function, *more_models)

            apply_next_model.func = function

            # If the model has already been imported and registered, partially
            # apply it to the function now. If not, add it to the list of
            # pending operations for the model, where it will be executed with
            # the model class as its sole argument once the model is ready.
            try:
                model_class = self.get_registered_model(*next_model)
            except LookupError:
                self._pending_operations[next_model].append(apply_next_model)
            else:
                apply_next_model(model_class)

    def do_pending_operations(self, model):
        """
        Take a newly-prepared model and pass it to each function waiting for
        it. This is called at the very end of Apps.register_model().
        """
        key = model._meta.app_label, model._meta.model_name
        for function in self._pending_operations.pop(key, []):
            function(model)


apps = Apps(installed_apps=None)
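The module-level apps instance created above is the registry the rest of Django queries. A minimal usage sketch, assuming a configured settings module ("myproject.settings" is a placeholder, not a path in this commit):

import os

import django

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "myproject.settings")  # placeholder module
django.setup()  # calls apps.populate(settings.INSTALLED_APPS)

from django.apps import apps

User = apps.get_model("auth", "User")             # ("app_label", "ModelName"), case-insensitive
print(apps.is_installed("django.contrib.admin"))  # True if listed in INSTALLED_APPS
print(apps.get_app_config("auth").verbose_name)   # human-readable name of the app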
403
srcs/.venv/lib/python3.11/site-packages/django/conf/__init__.py
Normal file
@ -0,0 +1,403 @@
"""
Settings and configuration for Django.

Read values from the module specified by the DJANGO_SETTINGS_MODULE environment
variable, and then from django.conf.global_settings; see the global_settings.py
for a list of all possible variables.
"""

import importlib
import os
import time
import traceback
import warnings
from pathlib import Path

import django
from django.conf import global_settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.deprecation import RemovedInDjango50Warning, RemovedInDjango51Warning
from django.utils.functional import LazyObject, empty

ENVIRONMENT_VARIABLE = "DJANGO_SETTINGS_MODULE"
DEFAULT_STORAGE_ALIAS = "default"
STATICFILES_STORAGE_ALIAS = "staticfiles"

# RemovedInDjango50Warning
USE_DEPRECATED_PYTZ_DEPRECATED_MSG = (
    "The USE_DEPRECATED_PYTZ setting, and support for pytz timezones is "
    "deprecated in favor of the stdlib zoneinfo module. Please update your "
    "code to use zoneinfo and remove the USE_DEPRECATED_PYTZ setting."
)

USE_L10N_DEPRECATED_MSG = (
    "The USE_L10N setting is deprecated. Starting with Django 5.0, localized "
    "formatting of data will always be enabled. For example Django will "
    "display numbers and dates using the format of the current locale."
)

CSRF_COOKIE_MASKED_DEPRECATED_MSG = (
    "The CSRF_COOKIE_MASKED transitional setting is deprecated. Support for "
    "it will be removed in Django 5.0."
)

DEFAULT_FILE_STORAGE_DEPRECATED_MSG = (
    "The DEFAULT_FILE_STORAGE setting is deprecated. Use STORAGES instead."
)

STATICFILES_STORAGE_DEPRECATED_MSG = (
    "The STATICFILES_STORAGE setting is deprecated. Use STORAGES instead."
)


class SettingsReference(str):
|
||||||
|
"""
|
||||||
|
String subclass which references a current settings value. It's treated as
|
||||||
|
the value in memory but serializes to a settings.NAME attribute reference.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __new__(self, value, setting_name):
|
||||||
|
return str.__new__(self, value)
|
||||||
|
|
||||||
|
def __init__(self, value, setting_name):
|
||||||
|
self.setting_name = setting_name
|
||||||
|
|
||||||
|
|
||||||
|
class LazySettings(LazyObject):
|
||||||
|
"""
|
||||||
|
A lazy proxy for either global Django settings or a custom settings object.
|
||||||
|
The user can manually configure settings prior to using them. Otherwise,
|
||||||
|
Django uses the settings module pointed to by DJANGO_SETTINGS_MODULE.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _setup(self, name=None):
|
||||||
|
"""
|
||||||
|
Load the settings module pointed to by the environment variable. This
|
||||||
|
is used the first time settings are needed, if the user hasn't
|
||||||
|
configured settings manually.
|
||||||
|
"""
|
||||||
|
settings_module = os.environ.get(ENVIRONMENT_VARIABLE)
|
||||||
|
if not settings_module:
|
||||||
|
desc = ("setting %s" % name) if name else "settings"
|
||||||
|
raise ImproperlyConfigured(
|
||||||
|
"Requested %s, but settings are not configured. "
|
||||||
|
"You must either define the environment variable %s "
|
||||||
|
"or call settings.configure() before accessing settings."
|
||||||
|
% (desc, ENVIRONMENT_VARIABLE)
|
||||||
|
)
|
||||||
|
|
||||||
|
self._wrapped = Settings(settings_module)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
# Hardcode the class name as otherwise it yields 'Settings'.
|
||||||
|
if self._wrapped is empty:
|
||||||
|
return "<LazySettings [Unevaluated]>"
|
||||||
|
return '<LazySettings "%(settings_module)s">' % {
|
||||||
|
"settings_module": self._wrapped.SETTINGS_MODULE,
|
||||||
|
}
|
||||||
|
|
||||||
|
def __getattr__(self, name):
|
||||||
|
"""Return the value of a setting and cache it in self.__dict__."""
|
||||||
|
if (_wrapped := self._wrapped) is empty:
|
||||||
|
self._setup(name)
|
||||||
|
_wrapped = self._wrapped
|
||||||
|
val = getattr(_wrapped, name)
|
||||||
|
|
||||||
|
# Special case some settings which require further modification.
|
||||||
|
# This is done here for performance reasons so the modified value is cached.
|
||||||
|
if name in {"MEDIA_URL", "STATIC_URL"} and val is not None:
|
||||||
|
val = self._add_script_prefix(val)
|
||||||
|
elif name == "SECRET_KEY" and not val:
|
||||||
|
raise ImproperlyConfigured("The SECRET_KEY setting must not be empty.")
|
||||||
|
|
||||||
|
self.__dict__[name] = val
|
||||||
|
return val
|
||||||
|
|
||||||
|
def __setattr__(self, name, value):
|
||||||
|
"""
|
||||||
|
Set the value of setting. Clear all cached values if _wrapped changes
|
||||||
|
(@override_settings does this) or clear single values when set.
|
||||||
|
"""
|
||||||
|
if name == "_wrapped":
|
||||||
|
self.__dict__.clear()
|
||||||
|
else:
|
||||||
|
self.__dict__.pop(name, None)
|
||||||
|
super().__setattr__(name, value)
|
||||||
|
|
||||||
|
def __delattr__(self, name):
|
||||||
|
"""Delete a setting and clear it from cache if needed."""
|
||||||
|
super().__delattr__(name)
|
||||||
|
self.__dict__.pop(name, None)
|
||||||
|
|
||||||
|
def configure(self, default_settings=global_settings, **options):
|
||||||
|
"""
|
||||||
|
Called to manually configure the settings. The 'default_settings'
|
||||||
|
parameter sets where to retrieve any unspecified values from (its
|
||||||
|
argument must support attribute access (__getattr__)).
|
||||||
|
"""
|
||||||
|
if self._wrapped is not empty:
|
||||||
|
raise RuntimeError("Settings already configured.")
|
||||||
|
holder = UserSettingsHolder(default_settings)
|
||||||
|
for name, value in options.items():
|
||||||
|
if not name.isupper():
|
||||||
|
raise TypeError("Setting %r must be uppercase." % name)
|
||||||
|
setattr(holder, name, value)
|
||||||
|
self._wrapped = holder
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _add_script_prefix(value):
|
||||||
|
"""
|
||||||
|
Add SCRIPT_NAME prefix to relative paths.
|
||||||
|
|
||||||
|
Useful when the app is being served at a subpath and manually prefixing
|
||||||
|
subpath to STATIC_URL and MEDIA_URL in settings is inconvenient.
|
||||||
|
"""
|
||||||
|
# Don't apply prefix to absolute paths and URLs.
|
||||||
|
if value.startswith(("http://", "https://", "/")):
|
||||||
|
return value
|
||||||
|
from django.urls import get_script_prefix
|
||||||
|
|
||||||
|
return "%s%s" % (get_script_prefix(), value)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def configured(self):
|
||||||
|
"""Return True if the settings have already been configured."""
|
||||||
|
return self._wrapped is not empty
|
||||||
|
|
||||||
|
def _show_deprecation_warning(self, message, category):
|
||||||
|
stack = traceback.extract_stack()
|
||||||
|
# Show a warning if the setting is used outside of Django.
|
||||||
|
# Stack index: -1 this line, -2 the property, -3 the
|
||||||
|
# LazyObject __getattribute__(), -4 the caller.
|
||||||
|
filename, _, _, _ = stack[-4]
|
||||||
|
if not filename.startswith(os.path.dirname(django.__file__)):
|
||||||
|
warnings.warn(message, category, stacklevel=2)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def USE_L10N(self):
|
||||||
|
self._show_deprecation_warning(
|
||||||
|
USE_L10N_DEPRECATED_MSG, RemovedInDjango50Warning
|
||||||
|
)
|
||||||
|
return self.__getattr__("USE_L10N")
|
||||||
|
|
||||||
|
# RemovedInDjango50Warning.
|
||||||
|
@property
|
||||||
|
def _USE_L10N_INTERNAL(self):
|
||||||
|
# Special hook to avoid checking a traceback in internal use on hot
|
||||||
|
# paths.
|
||||||
|
return self.__getattr__("USE_L10N")
|
||||||
|
|
||||||
|
# RemovedInDjango51Warning.
|
||||||
|
@property
|
||||||
|
def DEFAULT_FILE_STORAGE(self):
|
||||||
|
self._show_deprecation_warning(
|
||||||
|
DEFAULT_FILE_STORAGE_DEPRECATED_MSG, RemovedInDjango51Warning
|
||||||
|
)
|
||||||
|
return self.__getattr__("DEFAULT_FILE_STORAGE")
|
||||||
|
|
||||||
|
# RemovedInDjango51Warning.
|
||||||
|
@property
|
||||||
|
def STATICFILES_STORAGE(self):
|
||||||
|
self._show_deprecation_warning(
|
||||||
|
STATICFILES_STORAGE_DEPRECATED_MSG, RemovedInDjango51Warning
|
||||||
|
)
|
||||||
|
return self.__getattr__("STATICFILES_STORAGE")
|
||||||
|
|
||||||
|
|
||||||
|
class Settings:
|
||||||
|
def __init__(self, settings_module):
|
||||||
|
# update this dict from global settings (but only for ALL_CAPS settings)
|
||||||
|
for setting in dir(global_settings):
|
||||||
|
if setting.isupper():
|
||||||
|
setattr(self, setting, getattr(global_settings, setting))
|
||||||
|
|
||||||
|
# store the settings module in case someone later cares
|
||||||
|
self.SETTINGS_MODULE = settings_module
|
||||||
|
|
||||||
|
mod = importlib.import_module(self.SETTINGS_MODULE)
|
||||||
|
|
||||||
|
tuple_settings = (
|
||||||
|
"ALLOWED_HOSTS",
|
||||||
|
"INSTALLED_APPS",
|
||||||
|
"TEMPLATE_DIRS",
|
||||||
|
"LOCALE_PATHS",
|
||||||
|
"SECRET_KEY_FALLBACKS",
|
||||||
|
)
|
||||||
|
self._explicit_settings = set()
|
||||||
|
for setting in dir(mod):
|
||||||
|
if setting.isupper():
|
||||||
|
setting_value = getattr(mod, setting)
|
||||||
|
|
||||||
|
if setting in tuple_settings and not isinstance(
|
||||||
|
setting_value, (list, tuple)
|
||||||
|
):
|
||||||
|
raise ImproperlyConfigured(
|
||||||
|
"The %s setting must be a list or a tuple." % setting
|
||||||
|
)
|
||||||
|
setattr(self, setting, setting_value)
|
||||||
|
self._explicit_settings.add(setting)
|
||||||
|
|
||||||
|
if self.USE_TZ is False and not self.is_overridden("USE_TZ"):
|
||||||
|
warnings.warn(
|
||||||
|
"The default value of USE_TZ will change from False to True "
|
||||||
|
"in Django 5.0. Set USE_TZ to False in your project settings "
|
||||||
|
"if you want to keep the current default behavior.",
|
||||||
|
category=RemovedInDjango50Warning,
|
||||||
|
)
|
||||||
|
|
||||||
|
if self.is_overridden("USE_DEPRECATED_PYTZ"):
|
||||||
|
warnings.warn(USE_DEPRECATED_PYTZ_DEPRECATED_MSG, RemovedInDjango50Warning)
|
||||||
|
|
||||||
|
if self.is_overridden("CSRF_COOKIE_MASKED"):
|
||||||
|
warnings.warn(CSRF_COOKIE_MASKED_DEPRECATED_MSG, RemovedInDjango50Warning)
|
||||||
|
|
||||||
|
if hasattr(time, "tzset") and self.TIME_ZONE:
|
||||||
|
# When we can, attempt to validate the timezone. If we can't find
|
||||||
|
# this file, no check happens and it's harmless.
|
||||||
|
zoneinfo_root = Path("/usr/share/zoneinfo")
|
||||||
|
zone_info_file = zoneinfo_root.joinpath(*self.TIME_ZONE.split("/"))
|
||||||
|
if zoneinfo_root.exists() and not zone_info_file.exists():
|
||||||
|
raise ValueError("Incorrect timezone setting: %s" % self.TIME_ZONE)
|
||||||
|
# Move the time zone info into os.environ. See ticket #2315 for why
|
||||||
|
# we don't do this unconditionally (breaks Windows).
|
||||||
|
os.environ["TZ"] = self.TIME_ZONE
|
||||||
|
time.tzset()
|
||||||
|
|
||||||
|
if self.is_overridden("USE_L10N"):
|
||||||
|
warnings.warn(USE_L10N_DEPRECATED_MSG, RemovedInDjango50Warning)
|
||||||
|
|
||||||
|
if self.is_overridden("DEFAULT_FILE_STORAGE"):
|
||||||
|
if self.is_overridden("STORAGES"):
|
||||||
|
raise ImproperlyConfigured(
|
||||||
|
"DEFAULT_FILE_STORAGE/STORAGES are mutually exclusive."
|
||||||
|
)
|
||||||
|
self.STORAGES = {
|
||||||
|
**self.STORAGES,
|
||||||
|
DEFAULT_STORAGE_ALIAS: {"BACKEND": self.DEFAULT_FILE_STORAGE},
|
||||||
|
}
|
||||||
|
warnings.warn(DEFAULT_FILE_STORAGE_DEPRECATED_MSG, RemovedInDjango51Warning)
|
||||||
|
|
||||||
|
if self.is_overridden("STATICFILES_STORAGE"):
|
||||||
|
if self.is_overridden("STORAGES"):
|
||||||
|
raise ImproperlyConfigured(
|
||||||
|
"STATICFILES_STORAGE/STORAGES are mutually exclusive."
|
||||||
|
)
|
||||||
|
self.STORAGES = {
|
||||||
|
**self.STORAGES,
|
||||||
|
STATICFILES_STORAGE_ALIAS: {"BACKEND": self.STATICFILES_STORAGE},
|
||||||
|
}
|
||||||
|
warnings.warn(STATICFILES_STORAGE_DEPRECATED_MSG, RemovedInDjango51Warning)
|
||||||
|
# RemovedInDjango51Warning.
|
||||||
|
if self.is_overridden("STORAGES"):
|
||||||
|
setattr(
|
||||||
|
self,
|
||||||
|
"DEFAULT_FILE_STORAGE",
|
||||||
|
self.STORAGES.get(DEFAULT_STORAGE_ALIAS, {}).get("BACKEND"),
|
||||||
|
)
|
||||||
|
setattr(
|
||||||
|
self,
|
||||||
|
"STATICFILES_STORAGE",
|
||||||
|
self.STORAGES.get(STATICFILES_STORAGE_ALIAS, {}).get("BACKEND"),
|
||||||
|
)
|
||||||
|
|
||||||
|
def is_overridden(self, setting):
|
||||||
|
return setting in self._explicit_settings
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return '<%(cls)s "%(settings_module)s">' % {
|
||||||
|
"cls": self.__class__.__name__,
|
||||||
|
"settings_module": self.SETTINGS_MODULE,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class UserSettingsHolder:
|
||||||
|
"""Holder for user configured settings."""
|
||||||
|
|
||||||
|
# SETTINGS_MODULE doesn't make much sense in the manually configured
|
||||||
|
# (standalone) case.
|
||||||
|
SETTINGS_MODULE = None
|
||||||
|
|
||||||
|
def __init__(self, default_settings):
|
||||||
|
"""
|
||||||
|
Requests for configuration variables not in this class are satisfied
|
||||||
|
from the module specified in default_settings (if possible).
|
||||||
|
"""
|
||||||
|
self.__dict__["_deleted"] = set()
|
||||||
|
self.default_settings = default_settings
|
||||||
|
|
||||||
|
def __getattr__(self, name):
|
||||||
|
if not name.isupper() or name in self._deleted:
|
||||||
|
raise AttributeError
|
||||||
|
return getattr(self.default_settings, name)
|
||||||
|
|
||||||
|
def __setattr__(self, name, value):
|
||||||
|
self._deleted.discard(name)
|
||||||
|
if name == "USE_L10N":
|
||||||
|
warnings.warn(USE_L10N_DEPRECATED_MSG, RemovedInDjango50Warning)
|
||||||
|
if name == "CSRF_COOKIE_MASKED":
|
||||||
|
warnings.warn(CSRF_COOKIE_MASKED_DEPRECATED_MSG, RemovedInDjango50Warning)
|
||||||
|
if name == "DEFAULT_FILE_STORAGE":
|
||||||
|
self.STORAGES[DEFAULT_STORAGE_ALIAS] = {
|
||||||
|
"BACKEND": self.DEFAULT_FILE_STORAGE
|
||||||
|
}
|
||||||
|
warnings.warn(DEFAULT_FILE_STORAGE_DEPRECATED_MSG, RemovedInDjango51Warning)
|
||||||
|
if name == "STATICFILES_STORAGE":
|
||||||
|
self.STORAGES[STATICFILES_STORAGE_ALIAS] = {
|
||||||
|
"BACKEND": self.STATICFILES_STORAGE
|
||||||
|
}
|
||||||
|
warnings.warn(STATICFILES_STORAGE_DEPRECATED_MSG, RemovedInDjango51Warning)
|
||||||
|
super().__setattr__(name, value)
|
||||||
|
if name == "USE_DEPRECATED_PYTZ":
|
||||||
|
warnings.warn(USE_DEPRECATED_PYTZ_DEPRECATED_MSG, RemovedInDjango50Warning)
|
||||||
|
# RemovedInDjango51Warning.
|
||||||
|
if name == "STORAGES":
|
||||||
|
if default_file_storage := self.STORAGES.get(DEFAULT_STORAGE_ALIAS):
|
||||||
|
super().__setattr__(
|
||||||
|
"DEFAULT_FILE_STORAGE", default_file_storage.get("BACKEND")
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self.STORAGES.setdefault(
|
||||||
|
DEFAULT_STORAGE_ALIAS,
|
||||||
|
{"BACKEND": "django.core.files.storage.FileSystemStorage"},
|
||||||
|
)
|
||||||
|
if staticfiles_storage := self.STORAGES.get(STATICFILES_STORAGE_ALIAS):
|
||||||
|
super().__setattr__(
|
||||||
|
"STATICFILES_STORAGE", staticfiles_storage.get("BACKEND")
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self.STORAGES.setdefault(
|
||||||
|
STATICFILES_STORAGE_ALIAS,
|
||||||
|
{
|
||||||
|
"BACKEND": (
|
||||||
|
"django.contrib.staticfiles.storage.StaticFilesStorage"
|
||||||
|
),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
def __delattr__(self, name):
|
||||||
|
self._deleted.add(name)
|
||||||
|
if hasattr(self, name):
|
||||||
|
super().__delattr__(name)
|
||||||
|
|
||||||
|
def __dir__(self):
|
||||||
|
return sorted(
|
||||||
|
s
|
||||||
|
for s in [*self.__dict__, *dir(self.default_settings)]
|
||||||
|
if s not in self._deleted
|
||||||
|
)
|
||||||
|
|
||||||
|
def is_overridden(self, setting):
|
||||||
|
deleted = setting in self._deleted
|
||||||
|
set_locally = setting in self.__dict__
|
||||||
|
set_on_default = getattr(
|
||||||
|
self.default_settings, "is_overridden", lambda s: False
|
||||||
|
)(setting)
|
||||||
|
return deleted or set_locally or set_on_default
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "<%(cls)s>" % {
|
||||||
|
"cls": self.__class__.__name__,
|
||||||
|
}


settings = LazySettings()
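settings is the lazy proxy imported throughout Django. Outside a full project it can be configured manually, as the configure() docstring above describes; a small sketch under that assumption (all values are illustrative only):

from django.conf import settings

if not settings.configured:
    settings.configure(
        DEBUG=True,
        USE_TZ=True,
        SECRET_KEY="dummy-key-for-local-experiments-only",  # illustrative value
        INSTALLED_APPS=["django.contrib.contenttypes", "django.contrib.auth"],
        DATABASES={"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}},
    )

import django

django.setup()
print(settings.SECRET_KEY)  # attribute access resolves through LazySettings.__getattr__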
@ -0,0 +1,3 @@
from django.contrib import admin

# Register your models here.
@ -0,0 +1,6 @@
from django.apps import AppConfig


class {{ camel_case_app_name }}Config(AppConfig):
    default_auto_field = 'django.db.models.BigAutoField'
    name = '{{ app_name }}'
@ -0,0 +1,3 @@
from django.db import models

# Create your models here.
@ -0,0 +1,3 @@
from django.test import TestCase

# Create your tests here.
@ -0,0 +1,3 @@
from django.shortcuts import render

# Create your views here.
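These are the startapp template stubs vendored with the virtualenv. For reference, a typical app fills them in along these lines (the Article model and article_list view are hypothetical examples, not files in this commit):

# models.py
from django.db import models


class Article(models.Model):
    title = models.CharField(max_length=200)
    created_at = models.DateTimeField(auto_now_add=True)


# views.py
from django.shortcuts import render


def article_list(request):
    # Render a hypothetical template with all articles, newest first.
    articles = Article.objects.order_by("-created_at")
    return render(request, "articles/list.html", {"articles": articles})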
@ -0,0 +1,681 @@
|
|||||||
|
"""
|
||||||
|
Default Django settings. Override these with settings in the module pointed to
|
||||||
|
by the DJANGO_SETTINGS_MODULE environment variable.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
# This is defined here as a do-nothing function because we can't import
|
||||||
|
# django.utils.translation -- that module depends on the settings.
|
||||||
|
def gettext_noop(s):
|
||||||
|
return s
|
||||||
|
|
||||||
|
|
||||||
|
####################
|
||||||
|
# CORE #
|
||||||
|
####################
|
||||||
|
|
||||||
|
DEBUG = False
|
||||||
|
|
||||||
|
# Whether the framework should propagate raw exceptions rather than catching
|
||||||
|
# them. This is useful under some testing situations and should never be used
|
||||||
|
# on a live site.
|
||||||
|
DEBUG_PROPAGATE_EXCEPTIONS = False
|
||||||
|
|
||||||
|
# People who get code error notifications. In the format
|
||||||
|
# [('Full Name', 'email@example.com'), ('Full Name', 'anotheremail@example.com')]
|
||||||
|
ADMINS = []
|
||||||
|
|
||||||
|
# List of IP addresses, as strings, that:
|
||||||
|
# * See debug comments, when DEBUG is true
|
||||||
|
# * Receive x-headers
|
||||||
|
INTERNAL_IPS = []
|
||||||
|
|
||||||
|
# Hosts/domain names that are valid for this site.
|
||||||
|
# "*" matches anything, ".example.com" matches example.com and all subdomains
|
||||||
|
ALLOWED_HOSTS = []
|
||||||
|
|
||||||
|
# Local time zone for this installation. All choices can be found here:
|
||||||
|
# https://en.wikipedia.org/wiki/List_of_tz_zones_by_name (although not all
|
||||||
|
# systems may support all possibilities). When USE_TZ is True, this is
|
||||||
|
# interpreted as the default user time zone.
|
||||||
|
TIME_ZONE = "America/Chicago"
|
||||||
|
|
||||||
|
# If you set this to True, Django will use timezone-aware datetimes.
|
||||||
|
USE_TZ = False
|
||||||
|
|
||||||
|
# RemovedInDjango50Warning: It's a transitional setting helpful in migrating
|
||||||
|
# from pytz tzinfo to ZoneInfo(). Set True to continue using pytz tzinfo
|
||||||
|
# objects during the Django 4.x release cycle.
|
||||||
|
USE_DEPRECATED_PYTZ = False
|
||||||
|
|
||||||
|
# Language code for this installation. All choices can be found here:
|
||||||
|
# http://www.i18nguy.com/unicode/language-identifiers.html
|
||||||
|
LANGUAGE_CODE = "en-us"
|
||||||
|
|
||||||
|
# Languages we provide translations for, out of the box.
|
||||||
|
LANGUAGES = [
|
||||||
|
("af", gettext_noop("Afrikaans")),
|
||||||
|
("ar", gettext_noop("Arabic")),
|
||||||
|
("ar-dz", gettext_noop("Algerian Arabic")),
|
||||||
|
("ast", gettext_noop("Asturian")),
|
||||||
|
("az", gettext_noop("Azerbaijani")),
|
||||||
|
("bg", gettext_noop("Bulgarian")),
|
||||||
|
("be", gettext_noop("Belarusian")),
|
||||||
|
("bn", gettext_noop("Bengali")),
|
||||||
|
("br", gettext_noop("Breton")),
|
||||||
|
("bs", gettext_noop("Bosnian")),
|
||||||
|
("ca", gettext_noop("Catalan")),
|
||||||
|
("ckb", gettext_noop("Central Kurdish (Sorani)")),
|
||||||
|
("cs", gettext_noop("Czech")),
|
||||||
|
("cy", gettext_noop("Welsh")),
|
||||||
|
("da", gettext_noop("Danish")),
|
||||||
|
("de", gettext_noop("German")),
|
||||||
|
("dsb", gettext_noop("Lower Sorbian")),
|
||||||
|
("el", gettext_noop("Greek")),
|
||||||
|
("en", gettext_noop("English")),
|
||||||
|
("en-au", gettext_noop("Australian English")),
|
||||||
|
("en-gb", gettext_noop("British English")),
|
||||||
|
("eo", gettext_noop("Esperanto")),
|
||||||
|
("es", gettext_noop("Spanish")),
|
||||||
|
("es-ar", gettext_noop("Argentinian Spanish")),
|
||||||
|
("es-co", gettext_noop("Colombian Spanish")),
|
||||||
|
("es-mx", gettext_noop("Mexican Spanish")),
|
||||||
|
("es-ni", gettext_noop("Nicaraguan Spanish")),
|
||||||
|
("es-ve", gettext_noop("Venezuelan Spanish")),
|
||||||
|
("et", gettext_noop("Estonian")),
|
||||||
|
("eu", gettext_noop("Basque")),
|
||||||
|
("fa", gettext_noop("Persian")),
|
||||||
|
("fi", gettext_noop("Finnish")),
|
||||||
|
("fr", gettext_noop("French")),
|
||||||
|
("fy", gettext_noop("Frisian")),
|
||||||
|
("ga", gettext_noop("Irish")),
|
||||||
|
("gd", gettext_noop("Scottish Gaelic")),
|
||||||
|
("gl", gettext_noop("Galician")),
|
||||||
|
("he", gettext_noop("Hebrew")),
|
||||||
|
("hi", gettext_noop("Hindi")),
|
||||||
|
("hr", gettext_noop("Croatian")),
|
||||||
|
("hsb", gettext_noop("Upper Sorbian")),
|
||||||
|
("hu", gettext_noop("Hungarian")),
|
||||||
|
("hy", gettext_noop("Armenian")),
|
||||||
|
("ia", gettext_noop("Interlingua")),
|
||||||
|
("id", gettext_noop("Indonesian")),
|
||||||
|
("ig", gettext_noop("Igbo")),
|
||||||
|
("io", gettext_noop("Ido")),
|
||||||
|
("is", gettext_noop("Icelandic")),
|
||||||
|
("it", gettext_noop("Italian")),
|
||||||
|
("ja", gettext_noop("Japanese")),
|
||||||
|
("ka", gettext_noop("Georgian")),
|
||||||
|
("kab", gettext_noop("Kabyle")),
|
||||||
|
("kk", gettext_noop("Kazakh")),
|
||||||
|
("km", gettext_noop("Khmer")),
|
||||||
|
("kn", gettext_noop("Kannada")),
|
||||||
|
("ko", gettext_noop("Korean")),
|
||||||
|
("ky", gettext_noop("Kyrgyz")),
|
||||||
|
("lb", gettext_noop("Luxembourgish")),
|
||||||
|
("lt", gettext_noop("Lithuanian")),
|
||||||
|
("lv", gettext_noop("Latvian")),
|
||||||
|
("mk", gettext_noop("Macedonian")),
|
||||||
|
("ml", gettext_noop("Malayalam")),
|
||||||
|
("mn", gettext_noop("Mongolian")),
|
||||||
|
("mr", gettext_noop("Marathi")),
|
||||||
|
("ms", gettext_noop("Malay")),
|
||||||
|
("my", gettext_noop("Burmese")),
|
||||||
|
("nb", gettext_noop("Norwegian Bokmål")),
|
||||||
|
("ne", gettext_noop("Nepali")),
|
||||||
|
("nl", gettext_noop("Dutch")),
|
||||||
|
("nn", gettext_noop("Norwegian Nynorsk")),
|
||||||
|
("os", gettext_noop("Ossetic")),
|
||||||
|
("pa", gettext_noop("Punjabi")),
|
||||||
|
("pl", gettext_noop("Polish")),
|
||||||
|
("pt", gettext_noop("Portuguese")),
|
||||||
|
("pt-br", gettext_noop("Brazilian Portuguese")),
|
||||||
|
("ro", gettext_noop("Romanian")),
|
||||||
|
("ru", gettext_noop("Russian")),
|
||||||
|
("sk", gettext_noop("Slovak")),
|
||||||
|
("sl", gettext_noop("Slovenian")),
|
||||||
|
("sq", gettext_noop("Albanian")),
|
||||||
|
("sr", gettext_noop("Serbian")),
|
||||||
|
("sr-latn", gettext_noop("Serbian Latin")),
|
||||||
|
("sv", gettext_noop("Swedish")),
|
||||||
|
("sw", gettext_noop("Swahili")),
|
||||||
|
("ta", gettext_noop("Tamil")),
|
||||||
|
("te", gettext_noop("Telugu")),
|
||||||
|
("tg", gettext_noop("Tajik")),
|
||||||
|
("th", gettext_noop("Thai")),
|
||||||
|
("tk", gettext_noop("Turkmen")),
|
||||||
|
("tr", gettext_noop("Turkish")),
|
||||||
|
("tt", gettext_noop("Tatar")),
|
||||||
|
("udm", gettext_noop("Udmurt")),
|
||||||
|
("uk", gettext_noop("Ukrainian")),
|
||||||
|
("ur", gettext_noop("Urdu")),
|
||||||
|
("uz", gettext_noop("Uzbek")),
|
||||||
|
("vi", gettext_noop("Vietnamese")),
|
||||||
|
("zh-hans", gettext_noop("Simplified Chinese")),
|
||||||
|
("zh-hant", gettext_noop("Traditional Chinese")),
|
||||||
|
]
|
||||||
|
|
||||||
|
# Languages using BiDi (right-to-left) layout
|
||||||
|
LANGUAGES_BIDI = ["he", "ar", "ar-dz", "ckb", "fa", "ur"]
|
||||||
|
|
||||||
|
# If you set this to False, Django will make some optimizations so as not
|
||||||
|
# to load the internationalization machinery.
|
||||||
|
USE_I18N = True
|
||||||
|
LOCALE_PATHS = []
|
||||||
|
|
||||||
|
# Settings for language cookie
|
||||||
|
LANGUAGE_COOKIE_NAME = "django_language"
|
||||||
|
LANGUAGE_COOKIE_AGE = None
|
||||||
|
LANGUAGE_COOKIE_DOMAIN = None
|
||||||
|
LANGUAGE_COOKIE_PATH = "/"
|
||||||
|
LANGUAGE_COOKIE_SECURE = False
|
||||||
|
LANGUAGE_COOKIE_HTTPONLY = False
|
||||||
|
LANGUAGE_COOKIE_SAMESITE = None
|
||||||
|
|
||||||
|
|
||||||
|
# If you set this to True, Django will format dates, numbers and calendars
|
||||||
|
# according to user current locale.
|
||||||
|
USE_L10N = True
|
||||||
|
|
||||||
|
# Not-necessarily-technical managers of the site. They get broken link
|
||||||
|
# notifications and other various emails.
|
||||||
|
MANAGERS = ADMINS
|
||||||
|
|
||||||
|
# Default charset to use for all HttpResponse objects, if a MIME type isn't
|
||||||
|
# manually specified. It's used to construct the Content-Type header.
|
||||||
|
DEFAULT_CHARSET = "utf-8"
|
||||||
|
|
||||||
|
# Email address that error messages come from.
|
||||||
|
SERVER_EMAIL = "root@localhost"
|
||||||
|
|
||||||
|
# Database connection info. If left empty, will default to the dummy backend.
|
||||||
|
DATABASES = {}
|
||||||
|
|
||||||
|
# Classes used to implement DB routing behavior.
|
||||||
|
DATABASE_ROUTERS = []
|
||||||
|
|
||||||
|
# The email backend to use. For possible shortcuts see django.core.mail.
|
||||||
|
# The default is to use the SMTP backend.
|
||||||
|
# Third-party backends can be specified by providing a Python path
|
||||||
|
# to a module that defines an EmailBackend class.
|
||||||
|
EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"
|
||||||
|
|
||||||
|
# Host for sending email.
|
||||||
|
EMAIL_HOST = "localhost"
|
||||||
|
|
||||||
|
# Port for sending email.
|
||||||
|
EMAIL_PORT = 25
|
||||||
|
|
||||||
|
# Whether to send SMTP 'Date' header in the local time zone or in UTC.
|
||||||
|
EMAIL_USE_LOCALTIME = False
|
||||||
|
|
||||||
|
# Optional SMTP authentication information for EMAIL_HOST.
|
||||||
|
EMAIL_HOST_USER = ""
|
||||||
|
EMAIL_HOST_PASSWORD = ""
|
||||||
|
EMAIL_USE_TLS = False
|
||||||
|
EMAIL_USE_SSL = False
|
||||||
|
EMAIL_SSL_CERTFILE = None
|
||||||
|
EMAIL_SSL_KEYFILE = None
|
||||||
|
EMAIL_TIMEOUT = None
|
||||||
|
|
||||||
|
# List of strings representing installed apps.
|
||||||
|
INSTALLED_APPS = []
|
||||||
|
|
||||||
|
TEMPLATES = []
|
||||||
|
|
||||||
|
# Default form rendering class.
|
||||||
|
FORM_RENDERER = "django.forms.renderers.DjangoTemplates"
|
||||||
|
|
||||||
|
# Default email address to use for various automated correspondence from
|
||||||
|
# the site managers.
|
||||||
|
DEFAULT_FROM_EMAIL = "webmaster@localhost"
|
||||||
|
|
||||||
|
# Subject-line prefix for email messages send with django.core.mail.mail_admins
|
||||||
|
# or ...mail_managers. Make sure to include the trailing space.
|
||||||
|
EMAIL_SUBJECT_PREFIX = "[Django] "
|
||||||
|
|
||||||
|
# Whether to append trailing slashes to URLs.
|
||||||
|
APPEND_SLASH = True
|
||||||
|
|
||||||
|
# Whether to prepend the "www." subdomain to URLs that don't have it.
|
||||||
|
PREPEND_WWW = False
|
||||||
|
|
||||||
|
# Override the server-derived value of SCRIPT_NAME
|
||||||
|
FORCE_SCRIPT_NAME = None
|
||||||
|
|
||||||
|
# List of compiled regular expression objects representing User-Agent strings
|
||||||
|
# that are not allowed to visit any page, systemwide. Use this for bad
|
||||||
|
# robots/crawlers. Here are a few examples:
|
||||||
|
# import re
|
||||||
|
# DISALLOWED_USER_AGENTS = [
|
||||||
|
# re.compile(r'^NaverBot.*'),
|
||||||
|
# re.compile(r'^EmailSiphon.*'),
|
||||||
|
# re.compile(r'^SiteSucker.*'),
|
||||||
|
# re.compile(r'^sohu-search'),
|
||||||
|
# ]
|
||||||
|
DISALLOWED_USER_AGENTS = []
|
||||||
|
|
||||||
|
ABSOLUTE_URL_OVERRIDES = {}
|
||||||
|
|
||||||
|
# List of compiled regular expression objects representing URLs that need not
|
||||||
|
# be reported by BrokenLinkEmailsMiddleware. Here are a few examples:
|
||||||
|
# import re
|
||||||
|
# IGNORABLE_404_URLS = [
|
||||||
|
# re.compile(r'^/apple-touch-icon.*\.png$'),
|
||||||
|
# re.compile(r'^/favicon.ico$'),
|
||||||
|
# re.compile(r'^/robots.txt$'),
|
||||||
|
# re.compile(r'^/phpmyadmin/'),
|
||||||
|
# re.compile(r'\.(cgi|php|pl)$'),
|
||||||
|
# ]
|
||||||
|
IGNORABLE_404_URLS = []
|
||||||
|
|
||||||
|
# A secret key for this particular Django installation. Used in secret-key
|
||||||
|
# hashing algorithms. Set this in your settings, or Django will complain
|
||||||
|
# loudly.
|
||||||
|
SECRET_KEY = ""
|
||||||
|
|
||||||
|
# List of secret keys used to verify the validity of signatures. This allows
|
||||||
|
# secret key rotation.
|
||||||
|
SECRET_KEY_FALLBACKS = []
|
||||||
|
|
||||||
|
# Default file storage mechanism that holds media.
|
||||||
|
DEFAULT_FILE_STORAGE = "django.core.files.storage.FileSystemStorage"
|
||||||
|
|
||||||
|
STORAGES = {
|
||||||
|
"default": {
|
||||||
|
"BACKEND": "django.core.files.storage.FileSystemStorage",
|
||||||
|
},
|
||||||
|
"staticfiles": {
|
||||||
|
"BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
# Absolute filesystem path to the directory that will hold user-uploaded files.
|
||||||
|
# Example: "/var/www/example.com/media/"
|
||||||
|
MEDIA_ROOT = ""
|
||||||
|
|
||||||
|
# URL that handles the media served from MEDIA_ROOT.
|
||||||
|
# Examples: "http://example.com/media/", "http://media.example.com/"
|
||||||
|
MEDIA_URL = ""
|
||||||
|
|
||||||
|
# Absolute path to the directory static files should be collected to.
|
||||||
|
# Example: "/var/www/example.com/static/"
|
||||||
|
STATIC_ROOT = None
|
||||||
|
|
||||||
|
# URL that handles the static files served from STATIC_ROOT.
|
||||||
|
# Example: "http://example.com/static/", "http://static.example.com/"
|
||||||
|
STATIC_URL = None
|
||||||
|
|
||||||
|
# List of upload handler classes to be applied in order.
|
||||||
|
FILE_UPLOAD_HANDLERS = [
|
||||||
|
"django.core.files.uploadhandler.MemoryFileUploadHandler",
|
||||||
|
"django.core.files.uploadhandler.TemporaryFileUploadHandler",
|
||||||
|
]
|
||||||
|
|
||||||
|
# Maximum size, in bytes, of a request before it will be streamed to the
|
||||||
|
# file system instead of into memory.
|
||||||
|
FILE_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB
|
||||||
|
|
||||||
|
# Maximum size in bytes of request data (excluding file uploads) that will be
|
||||||
|
# read before a SuspiciousOperation (RequestDataTooBig) is raised.
|
||||||
|
DATA_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB
|
||||||
|
|
||||||
|
# Maximum number of GET/POST parameters that will be read before a
|
||||||
|
# SuspiciousOperation (TooManyFieldsSent) is raised.
|
||||||
|
DATA_UPLOAD_MAX_NUMBER_FIELDS = 1000
|
||||||
|
|
||||||
|
# Maximum number of files encoded in a multipart upload that will be read
|
||||||
|
# before a SuspiciousOperation (TooManyFilesSent) is raised.
|
||||||
|
DATA_UPLOAD_MAX_NUMBER_FILES = 100
|
||||||
|
|
||||||
|
# Directory in which upload streamed files will be temporarily saved. A value of
|
||||||
|
# `None` will make Django use the operating system's default temporary directory
|
||||||
|
# (i.e. "/tmp" on *nix systems).
|
||||||
|
FILE_UPLOAD_TEMP_DIR = None
|
||||||
|
|
||||||
|
# The numeric mode to set newly-uploaded files to. The value should be a mode
|
||||||
|
# you'd pass directly to os.chmod; see
|
||||||
|
# https://docs.python.org/library/os.html#files-and-directories.
|
||||||
|
FILE_UPLOAD_PERMISSIONS = 0o644
|
||||||
|
|
||||||
|
# The numeric mode to assign to newly-created directories, when uploading files.
|
||||||
|
# The value should be a mode as you'd pass to os.chmod;
|
||||||
|
# see https://docs.python.org/library/os.html#files-and-directories.
|
||||||
|
FILE_UPLOAD_DIRECTORY_PERMISSIONS = None
|
||||||
|
|
||||||
|
# Python module path where user will place custom format definition.
|
||||||
|
# The directory where this setting is pointing should contain subdirectories
|
||||||
|
# named as the locales, containing a formats.py file
|
||||||
|
# (i.e. "myproject.locale" for myproject/locale/en/formats.py etc. use)
|
||||||
|
FORMAT_MODULE_PATH = None
|
||||||
|
|
||||||
|
# Default formatting for date objects. See all available format strings here:
|
||||||
|
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
|
||||||
|
DATE_FORMAT = "N j, Y"
|
||||||
|
|
||||||
|
# Default formatting for datetime objects. See all available format strings here:
|
||||||
|
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
|
||||||
|
DATETIME_FORMAT = "N j, Y, P"
|
||||||
|
|
||||||
|
# Default formatting for time objects. See all available format strings here:
|
||||||
|
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
|
||||||
|
TIME_FORMAT = "P"
|
||||||
|
|
||||||
|
# Default formatting for date objects when only the year and month are relevant.
|
||||||
|
# See all available format strings here:
|
||||||
|
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
|
||||||
|
YEAR_MONTH_FORMAT = "F Y"
|
||||||
|
|
||||||
|
# Default formatting for date objects when only the month and day are relevant.
|
||||||
|
# See all available format strings here:
|
||||||
|
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
|
||||||
|
MONTH_DAY_FORMAT = "F j"
|
||||||
|
|
||||||
|
# Default short formatting for date objects. See all available format strings here:
|
||||||
|
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
|
||||||
|
SHORT_DATE_FORMAT = "m/d/Y"
|
||||||
|
|
||||||
|
# Default short formatting for datetime objects.
|
||||||
|
# See all available format strings here:
|
||||||
|
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
|
||||||
|
SHORT_DATETIME_FORMAT = "m/d/Y P"
|
||||||
|
|
||||||
|
# Default formats to be used when parsing dates from input boxes, in order
|
||||||
|
# See all available format string here:
|
||||||
|
# https://docs.python.org/library/datetime.html#strftime-behavior
|
||||||
|
# * Note that these format strings are different from the ones to display dates
|
||||||
|
DATE_INPUT_FORMATS = [
|
||||||
|
"%Y-%m-%d", # '2006-10-25'
|
||||||
|
"%m/%d/%Y", # '10/25/2006'
|
||||||
|
"%m/%d/%y", # '10/25/06'
|
||||||
|
"%b %d %Y", # 'Oct 25 2006'
|
||||||
|
"%b %d, %Y", # 'Oct 25, 2006'
|
||||||
|
"%d %b %Y", # '25 Oct 2006'
|
||||||
|
"%d %b, %Y", # '25 Oct, 2006'
|
||||||
|
"%B %d %Y", # 'October 25 2006'
|
||||||
|
"%B %d, %Y", # 'October 25, 2006'
|
||||||
|
"%d %B %Y", # '25 October 2006'
|
||||||
|
"%d %B, %Y", # '25 October, 2006'
|
||||||
|
]
|
||||||
|
|
||||||
|
# Default formats to be used when parsing times from input boxes, in order
|
||||||
|
# See all available format string here:
|
||||||
|
# https://docs.python.org/library/datetime.html#strftime-behavior
|
||||||
|
# * Note that these format strings are different from the ones to display dates
|
||||||
|
TIME_INPUT_FORMATS = [
|
||||||
|
"%H:%M:%S", # '14:30:59'
|
||||||
|
"%H:%M:%S.%f", # '14:30:59.000200'
|
||||||
|
"%H:%M", # '14:30'
|
||||||
|
]
|
||||||
|
|
||||||
|
# Default formats to be used when parsing dates and times from input boxes,
|
||||||
|
# in order
|
||||||
|
# See all available format string here:
|
||||||
|
# https://docs.python.org/library/datetime.html#strftime-behavior
|
||||||
|
# * Note that these format strings are different from the ones to display dates
|
||||||
|
DATETIME_INPUT_FORMATS = [
|
||||||
|
"%Y-%m-%d %H:%M:%S", # '2006-10-25 14:30:59'
|
||||||
|
"%Y-%m-%d %H:%M:%S.%f", # '2006-10-25 14:30:59.000200'
|
||||||
|
"%Y-%m-%d %H:%M", # '2006-10-25 14:30'
|
||||||
|
"%m/%d/%Y %H:%M:%S", # '10/25/2006 14:30:59'
|
||||||
|
"%m/%d/%Y %H:%M:%S.%f", # '10/25/2006 14:30:59.000200'
|
||||||
|
"%m/%d/%Y %H:%M", # '10/25/2006 14:30'
|
||||||
|
"%m/%d/%y %H:%M:%S", # '10/25/06 14:30:59'
|
||||||
|
"%m/%d/%y %H:%M:%S.%f", # '10/25/06 14:30:59.000200'
|
||||||
|
"%m/%d/%y %H:%M", # '10/25/06 14:30'
|
||||||
|
]
|
||||||
|
|
||||||
|
# First day of week, to be used on calendars
|
||||||
|
# 0 means Sunday, 1 means Monday...
|
||||||
|
FIRST_DAY_OF_WEEK = 0
|
||||||
|
|
||||||
|
# Decimal separator symbol
|
||||||
|
DECIMAL_SEPARATOR = "."
|
||||||
|
|
||||||
|
# Boolean that sets whether to add thousand separator when formatting numbers
|
||||||
|
USE_THOUSAND_SEPARATOR = False
|
||||||
|
|
||||||
|
# Number of digits that will be together, when splitting them by
|
||||||
|
# THOUSAND_SEPARATOR. 0 means no grouping, 3 means splitting by thousands...
|
||||||
|
NUMBER_GROUPING = 0
|
||||||
|
|
||||||
|
# Thousand separator symbol
|
||||||
|
THOUSAND_SEPARATOR = ","
|
||||||
|
|
||||||
|
# The tablespaces to use for each model when not specified otherwise.
|
||||||
|
DEFAULT_TABLESPACE = ""
|
||||||
|
DEFAULT_INDEX_TABLESPACE = ""
|
||||||
|
|
||||||
|
# Default primary key field type.
|
||||||
|
DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
|
||||||
|
|
||||||
|
# Default X-Frame-Options header value
|
||||||
|
X_FRAME_OPTIONS = "DENY"
|
||||||
|
|
||||||
|
USE_X_FORWARDED_HOST = False
|
||||||
|
USE_X_FORWARDED_PORT = False
|
||||||
|
|
||||||
|
# The Python dotted path to the WSGI application that Django's internal server
|
||||||
|
# (runserver) will use. If `None`, the return value of
|
||||||
|
# 'django.core.wsgi.get_wsgi_application' is used, thus preserving the same
|
||||||
|
# behavior as previous versions of Django. Otherwise this should point to an
|
||||||
|
# actual WSGI application object.
|
||||||
|
WSGI_APPLICATION = None
|
||||||
|
|
||||||
|
# If your Django app is behind a proxy that sets a header to specify secure
|
||||||
|
# connections, AND that proxy ensures that user-submitted headers with the
|
||||||
|
# same name are ignored (so that people can't spoof it), set this value to
|
||||||
|
# a tuple of (header_name, header_value). For any requests that come in with
|
||||||
|
# that header/value, request.is_secure() will return True.
|
||||||
|
# WARNING! Only set this if you fully understand what you're doing. Otherwise,
|
||||||
|
# you may be opening yourself up to a security risk.
|
||||||
|
SECURE_PROXY_SSL_HEADER = None

##############
# MIDDLEWARE #
##############

# List of middleware to use. Order is important; in the request phase, these
# middleware will be applied in the order given, and in the response
# phase the middleware will be applied in reverse order.
MIDDLEWARE = []

############
# SESSIONS #
############

# Cache to store session data if using the cache session backend.
SESSION_CACHE_ALIAS = "default"
# Cookie name. This can be whatever you want.
SESSION_COOKIE_NAME = "sessionid"
# Age of cookie, in seconds (default: 2 weeks).
SESSION_COOKIE_AGE = 60 * 60 * 24 * 7 * 2
# A string like "example.com", or None for standard domain cookie.
SESSION_COOKIE_DOMAIN = None
# Whether the session cookie should be secure (https:// only).
SESSION_COOKIE_SECURE = False
# The path of the session cookie.
SESSION_COOKIE_PATH = "/"
# Whether to use the HttpOnly flag.
SESSION_COOKIE_HTTPONLY = True
# Whether to set the flag restricting cookie leaks on cross-site requests.
# This can be 'Lax', 'Strict', 'None', or False to disable the flag.
SESSION_COOKIE_SAMESITE = "Lax"
# Whether to save the session data on every request.
SESSION_SAVE_EVERY_REQUEST = False
# Whether a user's session cookie expires when the web browser is closed.
SESSION_EXPIRE_AT_BROWSER_CLOSE = False
# The module to store session data
SESSION_ENGINE = "django.contrib.sessions.backends.db"
# Directory to store session files if using the file session module. If None,
# the backend will use a sensible default.
SESSION_FILE_PATH = None
# class to serialize session data
SESSION_SERIALIZER = "django.contrib.sessions.serializers.JSONSerializer"
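As a usage sketch, a project wanting cached sessions and stricter cookies could override a handful of these defaults in its own settings module; the values below are illustrative, not part of this commit:

SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"  # write-through cache backed by the DB
SESSION_CACHE_ALIAS = "default"
SESSION_COOKIE_AGE = 60 * 60 * 24            # one day instead of two weeks
SESSION_COOKIE_SECURE = True                 # only send the cookie over HTTPS
SESSION_COOKIE_SAMESITE = "Strict"
SESSION_EXPIRE_AT_BROWSER_CLOSE = True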

#########
# CACHE #
#########

# The cache backends to use.
CACHES = {
    "default": {
        "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
    }
}
CACHE_MIDDLEWARE_KEY_PREFIX = ""
CACHE_MIDDLEWARE_SECONDS = 600
CACHE_MIDDLEWARE_ALIAS = "default"
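A sketch of swapping the in-memory default for a shared cache, for example a Redis service sitting next to the db service in docker-compose.yml; the "redis" host name and the backend choice are assumptions, not something this commit configures:

CACHES = {
    "default": {
        # django.core.cache.backends.redis.RedisCache is available from Django 4.0;
        # "redis" would be the docker-compose service name.
        "BACKEND": "django.core.cache.backends.redis.RedisCache",
        "LOCATION": "redis://redis:6379/1",
    }
}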

##################
# AUTHENTICATION #
##################

AUTH_USER_MODEL = "auth.User"

AUTHENTICATION_BACKENDS = ["django.contrib.auth.backends.ModelBackend"]

LOGIN_URL = "/accounts/login/"

LOGIN_REDIRECT_URL = "/accounts/profile/"

LOGOUT_REDIRECT_URL = None

# The number of seconds a password reset link is valid for (default: 3 days).
PASSWORD_RESET_TIMEOUT = 60 * 60 * 24 * 3

# The first hasher in this list is the preferred algorithm. Any
# password using different algorithms will be converted automatically
# upon login.
PASSWORD_HASHERS = [
    "django.contrib.auth.hashers.PBKDF2PasswordHasher",
    "django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher",
    "django.contrib.auth.hashers.Argon2PasswordHasher",
    "django.contrib.auth.hashers.BCryptSHA256PasswordHasher",
    "django.contrib.auth.hashers.ScryptPasswordHasher",
]
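To illustrate how the ordering is used: make_password() hashes with the first entry, while check_password() can verify hashes produced by any listed hasher and upgrades them on successful login. A small sketch, assuming a configured Django project:

from django.contrib.auth.hashers import check_password, make_password

encoded = make_password("s3cret")            # uses the first hasher (PBKDF2 here)
assert encoded.startswith("pbkdf2_sha256$")
assert check_password("s3cret", encoded)     # verification walks the whole list if needed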

AUTH_PASSWORD_VALIDATORS = []

###########
# SIGNING #
###########

SIGNING_BACKEND = "django.core.signing.TimestampSigner"

########
# CSRF #
########

# Dotted path to callable to be used as view when a request is
# rejected by the CSRF middleware.
CSRF_FAILURE_VIEW = "django.views.csrf.csrf_failure"

# Settings for CSRF cookie.
CSRF_COOKIE_NAME = "csrftoken"
CSRF_COOKIE_AGE = 60 * 60 * 24 * 7 * 52
CSRF_COOKIE_DOMAIN = None
CSRF_COOKIE_PATH = "/"
CSRF_COOKIE_SECURE = False
CSRF_COOKIE_HTTPONLY = False
CSRF_COOKIE_SAMESITE = "Lax"
CSRF_HEADER_NAME = "HTTP_X_CSRFTOKEN"
CSRF_TRUSTED_ORIGINS = []
CSRF_USE_SESSIONS = False

# Whether to mask the CSRF cookie value. It's a transitional setting helpful in
# migrating multiple instances of the same project to Django 4.1+.
CSRF_COOKIE_MASKED = False
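Relevant to the compose file in this commit, which publishes the app on port 8000: since Django 4.0, entries in CSRF_TRUSTED_ORIGINS must include the scheme, and origins other than the request's own host need to be listed here for POSTs to pass the CSRF origin check. A hedged example for local development; the host names are assumptions:

CSRF_TRUSTED_ORIGINS = [
    "http://localhost:8000",
    "http://127.0.0.1:8000",
]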

############
# MESSAGES #
############

# Class to use as messages backend
MESSAGE_STORAGE = "django.contrib.messages.storage.fallback.FallbackStorage"

# Default values of MESSAGE_LEVEL and MESSAGE_TAGS are defined within
# django.contrib.messages to avoid imports in this settings file.

###########
# LOGGING #
###########

# The callable to use to configure logging
LOGGING_CONFIG = "logging.config.dictConfig"

# Custom logging configuration.
LOGGING = {}
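Because LOGGING_CONFIG points at logging.config.dictConfig, whatever a project assigns to LOGGING has to be a dictConfig-style mapping. A minimal console-only sketch a project might start from; the handler and level choices are illustrative:

LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "handlers": {
        "console": {"class": "logging.StreamHandler"},
    },
    "root": {"handlers": ["console"], "level": "INFO"},
}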

# Default exception reporter class used in case none has been
# specifically assigned to the HttpRequest instance.
DEFAULT_EXCEPTION_REPORTER = "django.views.debug.ExceptionReporter"

# Default exception reporter filter class used in case none has been
# specifically assigned to the HttpRequest instance.
DEFAULT_EXCEPTION_REPORTER_FILTER = "django.views.debug.SafeExceptionReporterFilter"

###########
# TESTING #
###########

# The name of the class to use to run the test suite
TEST_RUNNER = "django.test.runner.DiscoverRunner"

# Apps that don't need to be serialized at test database creation time
# (only apps with migrations are to start with)
TEST_NON_SERIALIZED_APPS = []

############
# FIXTURES #
############

# The list of directories to search for fixtures
FIXTURE_DIRS = []

###############
# STATICFILES #
###############

# A list of locations of additional static files
STATICFILES_DIRS = []

# The default file storage backend used during the build process
STATICFILES_STORAGE = "django.contrib.staticfiles.storage.StaticFilesStorage"

# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = [
    "django.contrib.staticfiles.finders.FileSystemFinder",
    "django.contrib.staticfiles.finders.AppDirectoriesFinder",
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
]

##############
# MIGRATIONS #
##############

# Migration module overrides for apps, by app label.
MIGRATION_MODULES = {}

#################
# SYSTEM CHECKS #
#################

# List of all issues generated by system checks that should be silenced. Light
# issues like warnings, infos or debugs will not generate a message. Silencing
# serious issues like errors and criticals does not result in hiding the
# message, but Django will not stop you from e.g. running server.
SILENCED_SYSTEM_CHECKS = []

#######################
# SECURITY MIDDLEWARE #
#######################
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_CROSS_ORIGIN_OPENER_POLICY = "same-origin"
SECURE_HSTS_INCLUDE_SUBDOMAINS = False
SECURE_HSTS_PRELOAD = False
SECURE_HSTS_SECONDS = 0
SECURE_REDIRECT_EXEMPT = []
SECURE_REFERRER_POLICY = "same-origin"
SECURE_SSL_HOST = None
SECURE_SSL_REDIRECT = False
@ -0,0 +1,623 @@
"""
LANG_INFO is a dictionary structure to provide meta information about languages.

About name_local: capitalize it as if your language name was appearing
inside a sentence in your language.
The 'fallback' key can be used to specify a special fallback logic which doesn't
follow the traditional 'fr-ca' -> 'fr' fallback logic.
"""

LANG_INFO = {
    "af": {"bidi": False, "code": "af", "name": "Afrikaans", "name_local": "Afrikaans"},
    "ar": {"bidi": True, "code": "ar", "name": "Arabic", "name_local": "العربيّة"},
    "ar-dz": {"bidi": True, "code": "ar-dz", "name": "Algerian Arabic", "name_local": "العربية الجزائرية"},
    "ast": {"bidi": False, "code": "ast", "name": "Asturian", "name_local": "asturianu"},
    "az": {"bidi": True, "code": "az", "name": "Azerbaijani", "name_local": "Azərbaycanca"},
    "be": {"bidi": False, "code": "be", "name": "Belarusian", "name_local": "беларуская"},
    "bg": {"bidi": False, "code": "bg", "name": "Bulgarian", "name_local": "български"},
    "bn": {"bidi": False, "code": "bn", "name": "Bengali", "name_local": "বাংলা"},
    "br": {"bidi": False, "code": "br", "name": "Breton", "name_local": "brezhoneg"},
    "bs": {"bidi": False, "code": "bs", "name": "Bosnian", "name_local": "bosanski"},
    "ca": {"bidi": False, "code": "ca", "name": "Catalan", "name_local": "català"},
    "ckb": {"bidi": True, "code": "ckb", "name": "Central Kurdish (Sorani)", "name_local": "کوردی"},
    "cs": {"bidi": False, "code": "cs", "name": "Czech", "name_local": "česky"},
    "cy": {"bidi": False, "code": "cy", "name": "Welsh", "name_local": "Cymraeg"},
    "da": {"bidi": False, "code": "da", "name": "Danish", "name_local": "dansk"},
    "de": {"bidi": False, "code": "de", "name": "German", "name_local": "Deutsch"},
    "dsb": {"bidi": False, "code": "dsb", "name": "Lower Sorbian", "name_local": "dolnoserbski"},
    "el": {"bidi": False, "code": "el", "name": "Greek", "name_local": "Ελληνικά"},
    "en": {"bidi": False, "code": "en", "name": "English", "name_local": "English"},
    "en-au": {"bidi": False, "code": "en-au", "name": "Australian English", "name_local": "Australian English"},
    "en-gb": {"bidi": False, "code": "en-gb", "name": "British English", "name_local": "British English"},
    "eo": {"bidi": False, "code": "eo", "name": "Esperanto", "name_local": "Esperanto"},
    "es": {"bidi": False, "code": "es", "name": "Spanish", "name_local": "español"},
    "es-ar": {"bidi": False, "code": "es-ar", "name": "Argentinian Spanish", "name_local": "español de Argentina"},
    "es-co": {"bidi": False, "code": "es-co", "name": "Colombian Spanish", "name_local": "español de Colombia"},
    "es-mx": {"bidi": False, "code": "es-mx", "name": "Mexican Spanish", "name_local": "español de Mexico"},
    "es-ni": {"bidi": False, "code": "es-ni", "name": "Nicaraguan Spanish", "name_local": "español de Nicaragua"},
    "es-ve": {"bidi": False, "code": "es-ve", "name": "Venezuelan Spanish", "name_local": "español de Venezuela"},
    "et": {"bidi": False, "code": "et", "name": "Estonian", "name_local": "eesti"},
    "eu": {"bidi": False, "code": "eu", "name": "Basque", "name_local": "Basque"},
    "fa": {"bidi": True, "code": "fa", "name": "Persian", "name_local": "فارسی"},
    "fi": {"bidi": False, "code": "fi", "name": "Finnish", "name_local": "suomi"},
    "fr": {"bidi": False, "code": "fr", "name": "French", "name_local": "français"},
    "fy": {"bidi": False, "code": "fy", "name": "Frisian", "name_local": "frysk"},
    "ga": {"bidi": False, "code": "ga", "name": "Irish", "name_local": "Gaeilge"},
    "gd": {"bidi": False, "code": "gd", "name": "Scottish Gaelic", "name_local": "Gàidhlig"},
    "gl": {"bidi": False, "code": "gl", "name": "Galician", "name_local": "galego"},
    "he": {"bidi": True, "code": "he", "name": "Hebrew", "name_local": "עברית"},
    "hi": {"bidi": False, "code": "hi", "name": "Hindi", "name_local": "हिंदी"},
    "hr": {"bidi": False, "code": "hr", "name": "Croatian", "name_local": "Hrvatski"},
    "hsb": {"bidi": False, "code": "hsb", "name": "Upper Sorbian", "name_local": "hornjoserbsce"},
    "hu": {"bidi": False, "code": "hu", "name": "Hungarian", "name_local": "Magyar"},
    "hy": {"bidi": False, "code": "hy", "name": "Armenian", "name_local": "հայերեն"},
    "ia": {"bidi": False, "code": "ia", "name": "Interlingua", "name_local": "Interlingua"},
    "io": {"bidi": False, "code": "io", "name": "Ido", "name_local": "ido"},
    "id": {"bidi": False, "code": "id", "name": "Indonesian", "name_local": "Bahasa Indonesia"},
    "ig": {"bidi": False, "code": "ig", "name": "Igbo", "name_local": "Asụsụ Ìgbò"},
    "is": {"bidi": False, "code": "is", "name": "Icelandic", "name_local": "Íslenska"},
    "it": {"bidi": False, "code": "it", "name": "Italian", "name_local": "italiano"},
    "ja": {"bidi": False, "code": "ja", "name": "Japanese", "name_local": "日本語"},
    "ka": {"bidi": False, "code": "ka", "name": "Georgian", "name_local": "ქართული"},
    "kab": {"bidi": False, "code": "kab", "name": "Kabyle", "name_local": "taqbaylit"},
    "kk": {"bidi": False, "code": "kk", "name": "Kazakh", "name_local": "Қазақ"},
    "km": {"bidi": False, "code": "km", "name": "Khmer", "name_local": "Khmer"},
    "kn": {"bidi": False, "code": "kn", "name": "Kannada", "name_local": "Kannada"},
    "ko": {"bidi": False, "code": "ko", "name": "Korean", "name_local": "한국어"},
    "ky": {"bidi": False, "code": "ky", "name": "Kyrgyz", "name_local": "Кыргызча"},
    "lb": {"bidi": False, "code": "lb", "name": "Luxembourgish", "name_local": "Lëtzebuergesch"},
    "lt": {"bidi": False, "code": "lt", "name": "Lithuanian", "name_local": "Lietuviškai"},
    "lv": {"bidi": False, "code": "lv", "name": "Latvian", "name_local": "latviešu"},
    "mk": {"bidi": False, "code": "mk", "name": "Macedonian", "name_local": "Македонски"},
    "ml": {"bidi": False, "code": "ml", "name": "Malayalam", "name_local": "മലയാളം"},
    "mn": {"bidi": False, "code": "mn", "name": "Mongolian", "name_local": "Mongolian"},
    "mr": {"bidi": False, "code": "mr", "name": "Marathi", "name_local": "मराठी"},
    "ms": {"bidi": False, "code": "ms", "name": "Malay", "name_local": "Bahasa Melayu"},
    "my": {"bidi": False, "code": "my", "name": "Burmese", "name_local": "မြန်မာဘာသာ"},
    "nb": {"bidi": False, "code": "nb", "name": "Norwegian Bokmal", "name_local": "norsk (bokmål)"},
    "ne": {"bidi": False, "code": "ne", "name": "Nepali", "name_local": "नेपाली"},
    "nl": {"bidi": False, "code": "nl", "name": "Dutch", "name_local": "Nederlands"},
    "nn": {"bidi": False, "code": "nn", "name": "Norwegian Nynorsk", "name_local": "norsk (nynorsk)"},
    "no": {"bidi": False, "code": "no", "name": "Norwegian", "name_local": "norsk"},
    "os": {"bidi": False, "code": "os", "name": "Ossetic", "name_local": "Ирон"},
    "pa": {"bidi": False, "code": "pa", "name": "Punjabi", "name_local": "Punjabi"},
    "pl": {"bidi": False, "code": "pl", "name": "Polish", "name_local": "polski"},
    "pt": {"bidi": False, "code": "pt", "name": "Portuguese", "name_local": "Português"},
    "pt-br": {"bidi": False, "code": "pt-br", "name": "Brazilian Portuguese", "name_local": "Português Brasileiro"},
    "ro": {"bidi": False, "code": "ro", "name": "Romanian", "name_local": "Română"},
    "ru": {"bidi": False, "code": "ru", "name": "Russian", "name_local": "Русский"},
    "sk": {"bidi": False, "code": "sk", "name": "Slovak", "name_local": "Slovensky"},
    "sl": {"bidi": False, "code": "sl", "name": "Slovenian", "name_local": "Slovenščina"},
    "sq": {"bidi": False, "code": "sq", "name": "Albanian", "name_local": "shqip"},
    "sr": {"bidi": False, "code": "sr", "name": "Serbian", "name_local": "српски"},
    "sr-latn": {"bidi": False, "code": "sr-latn", "name": "Serbian Latin", "name_local": "srpski (latinica)"},
    "sv": {"bidi": False, "code": "sv", "name": "Swedish", "name_local": "svenska"},
    "sw": {"bidi": False, "code": "sw", "name": "Swahili", "name_local": "Kiswahili"},
    "ta": {"bidi": False, "code": "ta", "name": "Tamil", "name_local": "தமிழ்"},
    "te": {"bidi": False, "code": "te", "name": "Telugu", "name_local": "తెలుగు"},
    "tg": {"bidi": False, "code": "tg", "name": "Tajik", "name_local": "тоҷикӣ"},
    "th": {"bidi": False, "code": "th", "name": "Thai", "name_local": "ภาษาไทย"},
    "tk": {"bidi": False, "code": "tk", "name": "Turkmen", "name_local": "Türkmençe"},
    "tr": {"bidi": False, "code": "tr", "name": "Turkish", "name_local": "Türkçe"},
    "tt": {"bidi": False, "code": "tt", "name": "Tatar", "name_local": "Татарча"},
    "udm": {"bidi": False, "code": "udm", "name": "Udmurt", "name_local": "Удмурт"},
    "uk": {"bidi": False, "code": "uk", "name": "Ukrainian", "name_local": "Українська"},
    "ur": {"bidi": True, "code": "ur", "name": "Urdu", "name_local": "اردو"},
    "uz": {"bidi": False, "code": "uz", "name": "Uzbek", "name_local": "oʻzbek tili"},
    "vi": {"bidi": False, "code": "vi", "name": "Vietnamese", "name_local": "Tiếng Việt"},
    "zh-cn": {"fallback": ["zh-hans"]},
    "zh-hans": {"bidi": False, "code": "zh-hans", "name": "Simplified Chinese", "name_local": "简体中文"},
    "zh-hant": {"bidi": False, "code": "zh-hant", "name": "Traditional Chinese", "name_local": "繁體中文"},
    "zh-hk": {"fallback": ["zh-hant"]},
    "zh-mo": {"fallback": ["zh-hant"]},
    "zh-my": {"fallback": ["zh-hans"]},
    "zh-sg": {"fallback": ["zh-hans"]},
    "zh-tw": {"fallback": ["zh-hant"]},
}
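The usual way to read this table is django.utils.translation.get_language_info(), which also resolves the 'fallback' entries, e.g. 'zh-cn' resolving to the 'zh-hans' data. A short sketch:

from django.utils.translation import get_language_info

info = get_language_info("zh-cn")      # served by the "zh-hans" entry via "fallback"
print(info["name"], info["name_local"], info["bidi"])
# Simplified Chinese 简体中文 False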
@ -0,0 +1,21 @@
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = "j F، Y"
TIME_FORMAT = "g:i A"
# DATETIME_FORMAT =
YEAR_MONTH_FORMAT = "F Y"
MONTH_DAY_FORMAT = "j F"
SHORT_DATE_FORMAT = "d/m/Y"
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see https://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ","
THOUSAND_SEPARATOR = "."
# NUMBER_GROUPING =
@ -0,0 +1,29 @@
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = "j F Y"
TIME_FORMAT = "H:i"
DATETIME_FORMAT = "j F Y H:i"
YEAR_MONTH_FORMAT = "F Y"
MONTH_DAY_FORMAT = "j F"
SHORT_DATE_FORMAT = "j F Y"
SHORT_DATETIME_FORMAT = "j F Y H:i"
FIRST_DAY_OF_WEEK = 0  # Sunday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see https://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
    "%Y/%m/%d",  # '2006/10/25'
]
TIME_INPUT_FORMATS = [
    "%H:%M",  # '14:30'
    "%H:%M:%S",  # '14:30:59'
]
DATETIME_INPUT_FORMATS = [
    "%Y/%m/%d %H:%M",  # '2006/10/25 14:30'
    "%Y/%m/%d %H:%M:%S",  # '2006/10/25 14:30:59'
]
DECIMAL_SEPARATOR = ","
THOUSAND_SEPARATOR = "."
NUMBER_GROUPING = 3
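These *_INPUT_FORMATS feed Django's locale-aware parsing: django.utils.formats.get_format() returns the list for the active language, and form fields such as forms.DateField try each pattern in turn. A sketch, assuming (the diff view does not say) that this formats module belongs to the "fa" locale:

from django.utils import formats, translation

with translation.override("fa"):       # assumption: this file is the "fa" locale's formats module
    patterns = formats.get_format("DATE_INPUT_FORMATS")
    # e.g. ["%Y/%m/%d", ...] -> a form DateField would accept "2006/10/25"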
@ -0,0 +1,30 @@
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = "j E Y"
TIME_FORMAT = "G:i"
DATETIME_FORMAT = "j E Y, G:i"
YEAR_MONTH_FORMAT = "F Y"
MONTH_DAY_FORMAT = "j F"
SHORT_DATE_FORMAT = "d.m.Y"
SHORT_DATETIME_FORMAT = "d.m.Y H:i"
FIRST_DAY_OF_WEEK = 1  # Monday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see https://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
    "%d.%m.%Y",  # '25.10.2006'
    "%d.%m.%y",  # '25.10.06'
]
DATETIME_INPUT_FORMATS = [
    "%d.%m.%Y %H:%M:%S",  # '25.10.2006 14:30:59'
    "%d.%m.%Y %H:%M:%S.%f",  # '25.10.2006 14:30:59.000200'
    "%d.%m.%Y %H:%M",  # '25.10.2006 14:30'
    "%d.%m.%y %H:%M:%S",  # '25.10.06 14:30:59'
    "%d.%m.%y %H:%M:%S.%f",  # '25.10.06 14:30:59.000200'
    "%d.%m.%y %H:%M",  # '25.10.06 14:30'
]
DECIMAL_SEPARATOR = ","
THOUSAND_SEPARATOR = "\xa0"  # non-breaking space
NUMBER_GROUPING = 3
@ -0,0 +1,21 @@
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = "d F Y"
TIME_FORMAT = "H:i"
# DATETIME_FORMAT =
# YEAR_MONTH_FORMAT =
MONTH_DAY_FORMAT = "j F"
SHORT_DATE_FORMAT = "d.m.Y"
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see https://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ","
THOUSAND_SEPARATOR = " "  # Non-breaking space
# NUMBER_GROUPING =
@ -0,0 +1,32 @@
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = "j F, Y"
TIME_FORMAT = "g:i A"
# DATETIME_FORMAT =
YEAR_MONTH_FORMAT = "F Y"
MONTH_DAY_FORMAT = "j F"
SHORT_DATE_FORMAT = "j M, Y"
# SHORT_DATETIME_FORMAT =
FIRST_DAY_OF_WEEK = 6  # Saturday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see https://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
    "%d/%m/%Y",  # 25/10/2016
    "%d/%m/%y",  # 25/10/16
    "%d-%m-%Y",  # 25-10-2016
    "%d-%m-%y",  # 25-10-16
]
TIME_INPUT_FORMATS = [
    "%H:%M:%S",  # 14:30:59
    "%H:%M",  # 14:30
]
DATETIME_INPUT_FORMATS = [
    "%d/%m/%Y %H:%M:%S",  # 25/10/2006 14:30:59
    "%d/%m/%Y %H:%M",  # 25/10/2006 14:30
]
DECIMAL_SEPARATOR = "."
THOUSAND_SEPARATOR = ","
# NUMBER_GROUPING =
@ -0,0 +1,21 @@
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = "j. N Y."
TIME_FORMAT = "G:i"
DATETIME_FORMAT = "j. N. Y. G:i T"
YEAR_MONTH_FORMAT = "F Y."
MONTH_DAY_FORMAT = "j. F"
SHORT_DATE_FORMAT = "Y M j"
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see https://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ","
THOUSAND_SEPARATOR = "."
# NUMBER_GROUPING =