RoshanV1
This commit is contained in:
parent
21ef5153ee
commit
5f3f4e93ff
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -197,6 +197,21 @@
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<!-- Demo Notice -->
|
||||
{% if user.username == 'demo_user' %}
|
||||
<div class="alert alert-info border-0 rounded-4 shadow-sm mb-4 p-3" style="background: linear-gradient(90deg, #f0f7ff 0%, #ffffff 100%); border-left: 5px solid #00529b !important;">
|
||||
<div class="d-flex align-items-center">
|
||||
<div class="icon-box bg-primary bg-opacity-10 text-primary rounded-circle p-2 me-3">
|
||||
<i class="bi bi-info-circle-fill fs-4"></i>
|
||||
</div>
|
||||
<div>
|
||||
<h6 class="mb-1 fw-bold" style="color: #00529b;">Demo Mode Active</h6>
|
||||
<p class="mb-0 small text-secondary">You are logged in as a <b>Demo User</b>. Explore the platform's features, but note that some real-world integrations like SMS are simulated.</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if involved_events %}
|
||||
<!-- Active involvements / Message section -->
|
||||
<div class="row mb-4">
|
||||
@ -681,4 +696,4 @@
|
||||
}
|
||||
}
|
||||
</script>
|
||||
{% endblock %}
|
||||
{% endblock %}
|
||||
@ -15,6 +15,16 @@
|
||||
<p class="text-secondary small">{% trans "Secure access for registered members" %}</p>
|
||||
</div>
|
||||
|
||||
<!-- Demo Notice -->
|
||||
<div class="alert alert-info border-0 rounded-3 mb-4 small shadow-sm" style="background-color: #f0f7ff; color: #00529b;">
|
||||
<div class="d-flex align-items-center">
|
||||
<i class="bi bi-info-circle-fill me-2 fs-5"></i>
|
||||
<div>
|
||||
<strong>Demo Mode Active:</strong> Use <b>demo_user</b> / <b>demo1234</b>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% if form.non_field_errors %}
|
||||
<div class="alert alert-danger border-0 rounded-3 mb-4 small">
|
||||
{% for error in form.non_field_errors %}
|
||||
@ -26,7 +36,7 @@
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<form method="post" class="needs-validation">
|
||||
<form method="post" class="needs-validation" id="loginForm">
|
||||
{% csrf_token %}
|
||||
{% for field in form %}
|
||||
<div class="mb-4">
|
||||
@ -50,9 +60,15 @@
|
||||
<a href="#" class="text-danger small text-decoration-none fw-500">Forgot password?</a>
|
||||
</div>
|
||||
|
||||
<button type="submit" class="btn btn-danger w-100 py-3 fw-bold mt-2 shadow-sm" style="background-color: #E63946; border: none; border-radius: 8px;">
|
||||
{% trans "LOG IN" %} <i class="bi bi-arrow-right-short ms-1"></i>
|
||||
</button>
|
||||
<div class="d-grid gap-2">
|
||||
<button type="submit" class="btn btn-danger py-3 fw-bold shadow-sm" style="background-color: #E63946; border: none; border-radius: 8px;">
|
||||
{% trans "LOG IN" %} <i class="bi bi-arrow-right-short ms-1"></i>
|
||||
</button>
|
||||
|
||||
<button type="button" onclick="fillDemoCredentials()" class="btn btn-outline-dark py-2 small fw-bold border-2" style="border-radius: 8px;">
|
||||
<i class="bi bi-person-check me-1"></i> Use Demo Account
|
||||
</button>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
<div class="text-center mt-5">
|
||||
@ -66,6 +82,19 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
function fillDemoCredentials() {
|
||||
const usernameField = document.querySelector('input[name="username"]');
|
||||
const passwordField = document.querySelector('input[name="password"]');
|
||||
if (usernameField && passwordField) {
|
||||
usernameField.value = 'demo_user';
|
||||
passwordField.value = 'demo1234';
|
||||
// Optional: submit the form automatically
|
||||
// document.getElementById('loginForm').submit();
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<style>
|
||||
input:not([type="checkbox"]) {
|
||||
display: block;
|
||||
|
||||
@ -199,6 +199,9 @@
|
||||
<div class="d-flex gap-3 flex-wrap">
|
||||
<a href="{% url 'register' %}" class="btn btn-hero btn-hero-primary">Get Started</a>
|
||||
<a href="{% url 'login' %}" class="btn btn-hero btn-outline-dark">System Login</a>
|
||||
<button onclick="directDemoLogin()" class="btn btn-hero btn-dark bg-dark text-white border-0 shadow-sm" style="background-color: #111827 !important;">
|
||||
<i class="bi bi-person-badge me-2 text-danger"></i> Demo Access
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-lg-6 text-center hero-image mt-5 mt-lg-0">
|
||||
@ -294,4 +297,16 @@
|
||||
</div>
|
||||
</footer>
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
||||
<form id="hiddenDemoLoginForm" method="POST" action="{% url 'login' %}" style="display:none;">
|
||||
{% csrf_token %}
|
||||
<input type="hidden" name="username" value="demo_user">
|
||||
<input type="hidden" name="password" value="demo1234">
|
||||
</form>
|
||||
|
||||
<script>
|
||||
function directDemoLogin() {
|
||||
document.getElementById('hiddenDemoLoginForm').submit();
|
||||
}
|
||||
</script>
|
||||
{% endblock %}
|
||||
@ -318,11 +318,7 @@ def home(request):
|
||||
blood_banks = BloodBank.objects.all()
|
||||
|
||||
# Stats for Dashboard
|
||||
demo_donations = 157
|
||||
demo_donors = 48
|
||||
|
||||
actual_completed = DonationEvent.objects.filter(is_completed=True).count()
|
||||
completed_donations = actual_completed + demo_donations
|
||||
completed_donations = DonationEvent.objects.filter(is_completed=True).count()
|
||||
|
||||
# Find Recent Contributions (Last 24 Hours)
|
||||
last_24_hours = timezone.now() - timezone.timedelta(hours=24)
|
||||
@ -332,7 +328,7 @@ def home(request):
|
||||
).select_related('donor').order_by('-date')
|
||||
|
||||
stats = {
|
||||
"total_donors": Donor.objects.count() + demo_donors,
|
||||
"total_donors": Donor.objects.count(),
|
||||
"active_requests": BloodRequest.objects.filter(
|
||||
Q(status='Active') | Q(status='Accepted', accepted_at__gte=three_days_ago)
|
||||
).count(),
|
||||
@ -628,13 +624,11 @@ def lives_saved(request):
|
||||
completed_donations = DonationEvent.objects.filter(is_completed=True).select_related('donor_user', 'request').order_by('-date')
|
||||
|
||||
total_donations = completed_donations.count()
|
||||
# Demo data from home view to keep consistency
|
||||
demo_donations = 157
|
||||
total_impact = (total_donations + demo_donations) * 3
|
||||
total_impact = total_donations * 3
|
||||
|
||||
context = {
|
||||
'donations': completed_donations[:10], # Show recent impact
|
||||
'total_donations': total_donations + demo_donations,
|
||||
'total_donations': total_donations,
|
||||
'total_impact': total_impact,
|
||||
'title': 'Lives Saved & Community Impact',
|
||||
}
|
||||
@ -930,4 +924,4 @@ def upload_health_report(request):
|
||||
else:
|
||||
messages.error(request, "Please fill in all required fields.")
|
||||
|
||||
return render(request, 'core/upload_health_report.html')
|
||||
return render(request, 'core/upload_health_report.html')
|
||||
0
server.log
Normal file
0
server.log
Normal file
247
venv/bin/Activate.ps1
Normal file
247
venv/bin/Activate.ps1
Normal file
@ -0,0 +1,247 @@
|
||||
<#
|
||||
.Synopsis
|
||||
Activate a Python virtual environment for the current PowerShell session.
|
||||
|
||||
.Description
|
||||
Pushes the python executable for a virtual environment to the front of the
|
||||
$Env:PATH environment variable and sets the prompt to signify that you are
|
||||
in a Python virtual environment. Makes use of the command line switches as
|
||||
well as the `pyvenv.cfg` file values present in the virtual environment.
|
||||
|
||||
.Parameter VenvDir
|
||||
Path to the directory that contains the virtual environment to activate. The
|
||||
default value for this is the parent of the directory that the Activate.ps1
|
||||
script is located within.
|
||||
|
||||
.Parameter Prompt
|
||||
The prompt prefix to display when this virtual environment is activated. By
|
||||
default, this prompt is the name of the virtual environment folder (VenvDir)
|
||||
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
|
||||
|
||||
.Example
|
||||
Activate.ps1
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Verbose
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and shows extra information about the activation as it executes.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
|
||||
Activates the Python virtual environment located in the specified location.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Prompt "MyPython"
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and prefixes the current prompt with the specified string (surrounded in
|
||||
parentheses) while the virtual environment is active.
|
||||
|
||||
.Notes
|
||||
On Windows, it may be required to enable this Activate.ps1 script by setting the
|
||||
execution policy for the user. You can do this by issuing the following PowerShell
|
||||
command:
|
||||
|
||||
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
|
||||
|
||||
For more information on Execution Policies:
|
||||
https://go.microsoft.com/fwlink/?LinkID=135170
|
||||
|
||||
#>
|
||||
Param(
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$VenvDir,
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$Prompt
|
||||
)
|
||||
|
||||
<# Function declarations --------------------------------------------------- #>
|
||||
|
||||
<#
|
||||
.Synopsis
|
||||
Remove all shell session elements added by the Activate script, including the
|
||||
addition of the virtual environment's Python executable from the beginning of
|
||||
the PATH variable.
|
||||
|
||||
.Parameter NonDestructive
|
||||
If present, do not remove this function from the global namespace for the
|
||||
session.
|
||||
|
||||
#>
|
||||
function global:deactivate ([switch]$NonDestructive) {
|
||||
# Revert to original values
|
||||
|
||||
# The prior prompt:
|
||||
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
|
||||
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
|
||||
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
|
||||
# The prior PYTHONHOME:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
}
|
||||
|
||||
# The prior PATH:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
|
||||
}
|
||||
|
||||
# Just remove the VIRTUAL_ENV altogether:
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV
|
||||
}
|
||||
|
||||
# Just remove VIRTUAL_ENV_PROMPT altogether.
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
|
||||
}
|
||||
|
||||
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
|
||||
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
|
||||
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
|
||||
}
|
||||
|
||||
# Leave deactivate function in the global namespace if requested:
|
||||
if (-not $NonDestructive) {
|
||||
Remove-Item -Path function:deactivate
|
||||
}
|
||||
}
|
||||
|
||||
<#
|
||||
.Description
|
||||
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
|
||||
given folder, and returns them in a map.
|
||||
|
||||
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
|
||||
two strings separated by `=` (with any amount of whitespace surrounding the =)
|
||||
then it is considered a `key = value` line. The left hand string is the key,
|
||||
the right hand is the value.
|
||||
|
||||
If the value starts with a `'` or a `"` then the first and last character is
|
||||
stripped from the value before being captured.
|
||||
|
||||
.Parameter ConfigDir
|
||||
Path to the directory that contains the `pyvenv.cfg` file.
|
||||
#>
|
||||
function Get-PyVenvConfig(
|
||||
[String]
|
||||
$ConfigDir
|
||||
) {
|
||||
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
|
||||
|
||||
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
|
||||
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
|
||||
|
||||
# An empty map will be returned if no config file is found.
|
||||
$pyvenvConfig = @{ }
|
||||
|
||||
if ($pyvenvConfigPath) {
|
||||
|
||||
Write-Verbose "File exists, parse `key = value` lines"
|
||||
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
|
||||
|
||||
$pyvenvConfigContent | ForEach-Object {
|
||||
$keyval = $PSItem -split "\s*=\s*", 2
|
||||
if ($keyval[0] -and $keyval[1]) {
|
||||
$val = $keyval[1]
|
||||
|
||||
# Remove extraneous quotations around a string value.
|
||||
if ("'""".Contains($val.Substring(0, 1))) {
|
||||
$val = $val.Substring(1, $val.Length - 2)
|
||||
}
|
||||
|
||||
$pyvenvConfig[$keyval[0]] = $val
|
||||
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
|
||||
}
|
||||
}
|
||||
}
|
||||
return $pyvenvConfig
|
||||
}
|
||||
|
||||
|
||||
<# Begin Activate script --------------------------------------------------- #>
|
||||
|
||||
# Determine the containing directory of this script
|
||||
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$VenvExecDir = Get-Item -Path $VenvExecPath
|
||||
|
||||
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
|
||||
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
|
||||
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
|
||||
|
||||
# Set values required in priority: CmdLine, ConfigFile, Default
|
||||
# First, get the location of the virtual environment, it might not be
|
||||
# VenvExecDir if specified on the command line.
|
||||
if ($VenvDir) {
|
||||
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
|
||||
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
|
||||
Write-Verbose "VenvDir=$VenvDir"
|
||||
}
|
||||
|
||||
# Next, read the `pyvenv.cfg` file to determine any required value such
|
||||
# as `prompt`.
|
||||
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
|
||||
|
||||
# Next, set the prompt from the command line, or the config file, or
|
||||
# just use the name of the virtual environment folder.
|
||||
if ($Prompt) {
|
||||
Write-Verbose "Prompt specified as argument, using '$Prompt'"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
|
||||
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
|
||||
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
|
||||
$Prompt = $pyvenvCfg['prompt'];
|
||||
}
|
||||
else {
|
||||
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
|
||||
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
|
||||
$Prompt = Split-Path -Path $venvDir -Leaf
|
||||
}
|
||||
}
|
||||
|
||||
Write-Verbose "Prompt = '$Prompt'"
|
||||
Write-Verbose "VenvDir='$VenvDir'"
|
||||
|
||||
# Deactivate any currently active virtual environment, but leave the
|
||||
# deactivate function in place.
|
||||
deactivate -nondestructive
|
||||
|
||||
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
|
||||
# that there is an activated venv.
|
||||
$env:VIRTUAL_ENV = $VenvDir
|
||||
|
||||
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
|
||||
|
||||
Write-Verbose "Setting prompt to '$Prompt'"
|
||||
|
||||
# Set the prompt to include the env name
|
||||
# Make sure _OLD_VIRTUAL_PROMPT is global
|
||||
function global:_OLD_VIRTUAL_PROMPT { "" }
|
||||
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
|
||||
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
|
||||
|
||||
function global:prompt {
|
||||
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
|
||||
_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
$env:VIRTUAL_ENV_PROMPT = $Prompt
|
||||
}
|
||||
|
||||
# Clear PYTHONHOME
|
||||
if (Test-Path -Path Env:PYTHONHOME) {
|
||||
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
Remove-Item -Path Env:PYTHONHOME
|
||||
}
|
||||
|
||||
# Add the venv to the PATH
|
||||
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
|
||||
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
|
||||
69
venv/bin/activate
Normal file
69
venv/bin/activate
Normal file
@ -0,0 +1,69 @@
|
||||
# This file must be used with "source bin/activate" *from bash*
|
||||
# you cannot run it directly
|
||||
|
||||
deactivate () {
|
||||
# reset old environment variables
|
||||
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
|
||||
PATH="${_OLD_VIRTUAL_PATH:-}"
|
||||
export PATH
|
||||
unset _OLD_VIRTUAL_PATH
|
||||
fi
|
||||
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
|
||||
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
|
||||
export PYTHONHOME
|
||||
unset _OLD_VIRTUAL_PYTHONHOME
|
||||
fi
|
||||
|
||||
# This should detect bash and zsh, which have a hash command that must
|
||||
# be called to get it to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||
hash -r 2> /dev/null
|
||||
fi
|
||||
|
||||
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
|
||||
PS1="${_OLD_VIRTUAL_PS1:-}"
|
||||
export PS1
|
||||
unset _OLD_VIRTUAL_PS1
|
||||
fi
|
||||
|
||||
unset VIRTUAL_ENV
|
||||
unset VIRTUAL_ENV_PROMPT
|
||||
if [ ! "${1:-}" = "nondestructive" ] ; then
|
||||
# Self destruct!
|
||||
unset -f deactivate
|
||||
fi
|
||||
}
|
||||
|
||||
# unset irrelevant variables
|
||||
deactivate nondestructive
|
||||
|
||||
VIRTUAL_ENV=/home/ubuntu/executor/workspace/venv
|
||||
export VIRTUAL_ENV
|
||||
|
||||
_OLD_VIRTUAL_PATH="$PATH"
|
||||
PATH="$VIRTUAL_ENV/"bin":$PATH"
|
||||
export PATH
|
||||
|
||||
# unset PYTHONHOME if set
|
||||
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
|
||||
# could use `if (set -u; : $PYTHONHOME) ;` in bash
|
||||
if [ -n "${PYTHONHOME:-}" ] ; then
|
||||
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
|
||||
unset PYTHONHOME
|
||||
fi
|
||||
|
||||
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
|
||||
_OLD_VIRTUAL_PS1="${PS1:-}"
|
||||
PS1='(venv) '"${PS1:-}"
|
||||
export PS1
|
||||
VIRTUAL_ENV_PROMPT='(venv) '
|
||||
export VIRTUAL_ENV_PROMPT
|
||||
fi
|
||||
|
||||
# This should detect bash and zsh, which have a hash command that must
|
||||
# be called to get it to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||
hash -r 2> /dev/null
|
||||
fi
|
||||
26
venv/bin/activate.csh
Normal file
26
venv/bin/activate.csh
Normal file
@ -0,0 +1,26 @@
|
||||
# This file must be used with "source bin/activate.csh" *from csh*.
|
||||
# You cannot run it directly.
|
||||
# Created by Davide Di Blasi <davidedb@gmail.com>.
|
||||
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
|
||||
|
||||
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
setenv VIRTUAL_ENV /home/ubuntu/executor/workspace/venv
|
||||
|
||||
set _OLD_VIRTUAL_PATH="$PATH"
|
||||
setenv PATH "$VIRTUAL_ENV/"bin":$PATH"
|
||||
|
||||
|
||||
set _OLD_VIRTUAL_PROMPT="$prompt"
|
||||
|
||||
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
|
||||
set prompt = '(venv) '"$prompt"
|
||||
setenv VIRTUAL_ENV_PROMPT '(venv) '
|
||||
endif
|
||||
|
||||
alias pydoc python -m pydoc
|
||||
|
||||
rehash
|
||||
69
venv/bin/activate.fish
Normal file
69
venv/bin/activate.fish
Normal file
@ -0,0 +1,69 @@
|
||||
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
|
||||
# (https://fishshell.com/); you cannot run it directly.
|
||||
|
||||
function deactivate -d "Exit virtual environment and return to normal shell environment"
|
||||
# reset old environment variables
|
||||
if test -n "$_OLD_VIRTUAL_PATH"
|
||||
set -gx PATH $_OLD_VIRTUAL_PATH
|
||||
set -e _OLD_VIRTUAL_PATH
|
||||
end
|
||||
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
|
||||
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
|
||||
set -e _OLD_VIRTUAL_PYTHONHOME
|
||||
end
|
||||
|
||||
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
|
||||
set -e _OLD_FISH_PROMPT_OVERRIDE
|
||||
# prevents error when using nested fish instances (Issue #93858)
|
||||
if functions -q _old_fish_prompt
|
||||
functions -e fish_prompt
|
||||
functions -c _old_fish_prompt fish_prompt
|
||||
functions -e _old_fish_prompt
|
||||
end
|
||||
end
|
||||
|
||||
set -e VIRTUAL_ENV
|
||||
set -e VIRTUAL_ENV_PROMPT
|
||||
if test "$argv[1]" != "nondestructive"
|
||||
# Self-destruct!
|
||||
functions -e deactivate
|
||||
end
|
||||
end
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
set -gx VIRTUAL_ENV /home/ubuntu/executor/workspace/venv
|
||||
|
||||
set -gx _OLD_VIRTUAL_PATH $PATH
|
||||
set -gx PATH "$VIRTUAL_ENV/"bin $PATH
|
||||
|
||||
# Unset PYTHONHOME if set.
|
||||
if set -q PYTHONHOME
|
||||
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
|
||||
set -e PYTHONHOME
|
||||
end
|
||||
|
||||
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
|
||||
# fish uses a function instead of an env var to generate the prompt.
|
||||
|
||||
# Save the current fish_prompt function as the function _old_fish_prompt.
|
||||
functions -c fish_prompt _old_fish_prompt
|
||||
|
||||
# With the original prompt function renamed, we can override with our own.
|
||||
function fish_prompt
|
||||
# Save the return status of the last command.
|
||||
set -l old_status $status
|
||||
|
||||
# Output the venv prompt; color taken from the blue of the Python logo.
|
||||
printf "%s%s%s" (set_color 4B8BBE) '(venv) ' (set_color normal)
|
||||
|
||||
# Restore the return status of the previous command.
|
||||
echo "exit $old_status" | .
|
||||
# Output the original/"old" prompt.
|
||||
_old_fish_prompt
|
||||
end
|
||||
|
||||
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
|
||||
set -gx VIRTUAL_ENV_PROMPT '(venv) '
|
||||
end
|
||||
8
venv/bin/django-admin
Executable file
8
venv/bin/django-admin
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ubuntu/executor/workspace/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from django.core.management import execute_from_command_line
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(execute_from_command_line())
|
||||
8
venv/bin/dotenv
Executable file
8
venv/bin/dotenv
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ubuntu/executor/workspace/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from dotenv.__main__ import cli
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(cli())
|
||||
8
venv/bin/pip
Executable file
8
venv/bin/pip
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ubuntu/executor/workspace/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/pip3
Executable file
8
venv/bin/pip3
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ubuntu/executor/workspace/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/pip3.11
Executable file
8
venv/bin/pip3.11
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ubuntu/executor/workspace/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
1
venv/bin/python
Symbolic link
1
venv/bin/python
Symbolic link
@ -0,0 +1 @@
|
||||
python3
|
||||
1
venv/bin/python3
Symbolic link
1
venv/bin/python3
Symbolic link
@ -0,0 +1 @@
|
||||
/usr/bin/python3
|
||||
1
venv/bin/python3.11
Symbolic link
1
venv/bin/python3.11
Symbolic link
@ -0,0 +1 @@
|
||||
python3
|
||||
8
venv/bin/sqlformat
Executable file
8
venv/bin/sqlformat
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/ubuntu/executor/workspace/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from sqlparse.__main__ import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
168
venv/lib/python3.11/site-packages/MySQLdb/__init__.py
Normal file
168
venv/lib/python3.11/site-packages/MySQLdb/__init__.py
Normal file
@ -0,0 +1,168 @@
|
||||
"""
|
||||
MySQLdb - A DB API v2.0 compatible interface to MySQL.
|
||||
|
||||
This package is a wrapper around _mysql, which mostly implements the
|
||||
MySQL C API.
|
||||
|
||||
connect() -- connects to server
|
||||
|
||||
See the C API specification and the MySQL documentation for more info
|
||||
on other items.
|
||||
|
||||
For information on how MySQLdb handles type conversion, see the
|
||||
MySQLdb.converters module.
|
||||
"""
|
||||
|
||||
from .release import version_info
|
||||
from . import _mysql
|
||||
|
||||
if version_info != _mysql.version_info:
|
||||
raise ImportError(
|
||||
f"this is MySQLdb version {version_info}, "
|
||||
f"but _mysql is version {_mysql.version_info!r}\n"
|
||||
f"_mysql: {_mysql.__file__!r}"
|
||||
)
|
||||
|
||||
|
||||
from ._mysql import (
|
||||
NotSupportedError,
|
||||
OperationalError,
|
||||
get_client_info,
|
||||
ProgrammingError,
|
||||
Error,
|
||||
InterfaceError,
|
||||
debug,
|
||||
IntegrityError,
|
||||
string_literal,
|
||||
MySQLError,
|
||||
DataError,
|
||||
DatabaseError,
|
||||
InternalError,
|
||||
Warning,
|
||||
)
|
||||
from MySQLdb.constants import FIELD_TYPE
|
||||
from MySQLdb.times import (
|
||||
Date,
|
||||
Time,
|
||||
Timestamp,
|
||||
DateFromTicks,
|
||||
TimeFromTicks,
|
||||
TimestampFromTicks,
|
||||
)
|
||||
|
||||
threadsafety = 1
|
||||
apilevel = "2.0"
|
||||
paramstyle = "format"
|
||||
|
||||
|
||||
class DBAPISet(frozenset):
|
||||
"""A special type of set for which A == x is true if A is a
|
||||
DBAPISet and x is a member of that set."""
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, DBAPISet):
|
||||
return not self.difference(other)
|
||||
return other in self
|
||||
|
||||
|
||||
STRING = DBAPISet([FIELD_TYPE.ENUM, FIELD_TYPE.STRING, FIELD_TYPE.VAR_STRING])
|
||||
BINARY = DBAPISet(
|
||||
[
|
||||
FIELD_TYPE.BLOB,
|
||||
FIELD_TYPE.LONG_BLOB,
|
||||
FIELD_TYPE.MEDIUM_BLOB,
|
||||
FIELD_TYPE.TINY_BLOB,
|
||||
]
|
||||
)
|
||||
NUMBER = DBAPISet(
|
||||
[
|
||||
FIELD_TYPE.DECIMAL,
|
||||
FIELD_TYPE.DOUBLE,
|
||||
FIELD_TYPE.FLOAT,
|
||||
FIELD_TYPE.INT24,
|
||||
FIELD_TYPE.LONG,
|
||||
FIELD_TYPE.LONGLONG,
|
||||
FIELD_TYPE.TINY,
|
||||
FIELD_TYPE.YEAR,
|
||||
FIELD_TYPE.NEWDECIMAL,
|
||||
]
|
||||
)
|
||||
DATE = DBAPISet([FIELD_TYPE.DATE])
|
||||
TIME = DBAPISet([FIELD_TYPE.TIME])
|
||||
TIMESTAMP = DBAPISet([FIELD_TYPE.TIMESTAMP, FIELD_TYPE.DATETIME])
|
||||
DATETIME = TIMESTAMP
|
||||
ROWID = DBAPISet()
|
||||
|
||||
|
||||
def test_DBAPISet_set_equality():
|
||||
assert STRING == STRING
|
||||
|
||||
|
||||
def test_DBAPISet_set_inequality():
|
||||
assert STRING != NUMBER
|
||||
|
||||
|
||||
def test_DBAPISet_set_equality_membership():
|
||||
assert FIELD_TYPE.VAR_STRING == STRING
|
||||
|
||||
|
||||
def test_DBAPISet_set_inequality_membership():
|
||||
assert FIELD_TYPE.DATE != STRING
|
||||
|
||||
|
||||
def Binary(x):
|
||||
return bytes(x)
|
||||
|
||||
|
||||
def Connect(*args, **kwargs):
|
||||
"""Factory function for connections.Connection."""
|
||||
from MySQLdb.connections import Connection
|
||||
|
||||
return Connection(*args, **kwargs)
|
||||
|
||||
|
||||
connect = Connection = Connect
|
||||
|
||||
__all__ = [
|
||||
"BINARY",
|
||||
"Binary",
|
||||
"Connect",
|
||||
"Connection",
|
||||
"DATE",
|
||||
"Date",
|
||||
"Time",
|
||||
"Timestamp",
|
||||
"DateFromTicks",
|
||||
"TimeFromTicks",
|
||||
"TimestampFromTicks",
|
||||
"DataError",
|
||||
"DatabaseError",
|
||||
"Error",
|
||||
"FIELD_TYPE",
|
||||
"IntegrityError",
|
||||
"InterfaceError",
|
||||
"InternalError",
|
||||
"MySQLError",
|
||||
"NUMBER",
|
||||
"NotSupportedError",
|
||||
"DBAPISet",
|
||||
"OperationalError",
|
||||
"ProgrammingError",
|
||||
"ROWID",
|
||||
"STRING",
|
||||
"TIME",
|
||||
"TIMESTAMP",
|
||||
"Warning",
|
||||
"apilevel",
|
||||
"connect",
|
||||
"connections",
|
||||
"constants",
|
||||
"converters",
|
||||
"cursors",
|
||||
"debug",
|
||||
"get_client_info",
|
||||
"paramstyle",
|
||||
"string_literal",
|
||||
"threadsafety",
|
||||
"version_info",
|
||||
]
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
91
venv/lib/python3.11/site-packages/MySQLdb/_exceptions.py
Normal file
91
venv/lib/python3.11/site-packages/MySQLdb/_exceptions.py
Normal file
@ -0,0 +1,91 @@
|
||||
"""Exception classes for _mysql and MySQLdb.
|
||||
|
||||
These classes are dictated by the DB API v2.0:
|
||||
|
||||
https://www.python.org/dev/peps/pep-0249/
|
||||
"""
|
||||
|
||||
|
||||
class MySQLError(Exception):
|
||||
"""Exception related to operation with MySQL."""
|
||||
|
||||
__module__ = "MySQLdb"
|
||||
|
||||
|
||||
class Warning(Warning, MySQLError):
|
||||
"""Exception raised for important warnings like data truncations
|
||||
while inserting, etc."""
|
||||
|
||||
__module__ = "MySQLdb"
|
||||
|
||||
|
||||
class Error(MySQLError):
|
||||
"""Exception that is the base class of all other error exceptions
|
||||
(not Warning)."""
|
||||
|
||||
__module__ = "MySQLdb"
|
||||
|
||||
|
||||
class InterfaceError(Error):
|
||||
"""Exception raised for errors that are related to the database
|
||||
interface rather than the database itself."""
|
||||
|
||||
__module__ = "MySQLdb"
|
||||
|
||||
|
||||
class DatabaseError(Error):
|
||||
"""Exception raised for errors that are related to the
|
||||
database."""
|
||||
|
||||
__module__ = "MySQLdb"
|
||||
|
||||
|
||||
class DataError(DatabaseError):
|
||||
"""Exception raised for errors that are due to problems with the
|
||||
processed data like division by zero, numeric value out of range,
|
||||
etc."""
|
||||
|
||||
__module__ = "MySQLdb"
|
||||
|
||||
|
||||
class OperationalError(DatabaseError):
|
||||
"""Exception raised for errors that are related to the database's
|
||||
operation and not necessarily under the control of the programmer,
|
||||
e.g. an unexpected disconnect occurs, the data source name is not
|
||||
found, a transaction could not be processed, a memory allocation
|
||||
error occurred during processing, etc."""
|
||||
|
||||
__module__ = "MySQLdb"
|
||||
|
||||
|
||||
class IntegrityError(DatabaseError):
|
||||
"""Exception raised when the relational integrity of the database
|
||||
is affected, e.g. a foreign key check fails, duplicate key,
|
||||
etc."""
|
||||
|
||||
__module__ = "MySQLdb"
|
||||
|
||||
|
||||
class InternalError(DatabaseError):
|
||||
"""Exception raised when the database encounters an internal
|
||||
error, e.g. the cursor is not valid anymore, the transaction is
|
||||
out of sync, etc."""
|
||||
|
||||
__module__ = "MySQLdb"
|
||||
|
||||
|
||||
class ProgrammingError(DatabaseError):
|
||||
"""Exception raised for programming errors, e.g. table not found
|
||||
or already exists, syntax error in the SQL statement, wrong number
|
||||
of parameters specified, etc."""
|
||||
|
||||
__module__ = "MySQLdb"
|
||||
|
||||
|
||||
class NotSupportedError(DatabaseError):
|
||||
"""Exception raised in case a method or database API was used
|
||||
which is not supported by the database, e.g. requesting a
|
||||
.rollback() on a connection that does not support transaction or
|
||||
has transactions turned off."""
|
||||
|
||||
__module__ = "MySQLdb"
|
||||
2959
venv/lib/python3.11/site-packages/MySQLdb/_mysql.c
Normal file
2959
venv/lib/python3.11/site-packages/MySQLdb/_mysql.c
Normal file
File diff suppressed because it is too large
Load Diff
BIN
venv/lib/python3.11/site-packages/MySQLdb/_mysql.cpython-311-x86_64-linux-gnu.so
Executable file
BIN
venv/lib/python3.11/site-packages/MySQLdb/_mysql.cpython-311-x86_64-linux-gnu.so
Executable file
Binary file not shown.
362
venv/lib/python3.11/site-packages/MySQLdb/connections.py
Normal file
362
venv/lib/python3.11/site-packages/MySQLdb/connections.py
Normal file
@ -0,0 +1,362 @@
|
||||
"""
|
||||
This module implements connections for MySQLdb. Presently there is
|
||||
only one class: Connection. Others are unlikely. However, you might
|
||||
want to make your own subclasses. In most cases, you will probably
|
||||
override Connection.default_cursor with a non-standard Cursor class.
|
||||
"""
|
||||
import re
|
||||
|
||||
from . import cursors, _mysql
|
||||
from ._exceptions import (
|
||||
Warning,
|
||||
Error,
|
||||
InterfaceError,
|
||||
DataError,
|
||||
DatabaseError,
|
||||
OperationalError,
|
||||
IntegrityError,
|
||||
InternalError,
|
||||
NotSupportedError,
|
||||
ProgrammingError,
|
||||
)
|
||||
|
||||
# Mapping from MySQL charset name to Python codec name
|
||||
_charset_to_encoding = {
|
||||
"utf8mb4": "utf8",
|
||||
"utf8mb3": "utf8",
|
||||
"latin1": "cp1252",
|
||||
"koi8r": "koi8_r",
|
||||
"koi8u": "koi8_u",
|
||||
}
|
||||
|
||||
re_numeric_part = re.compile(r"^(\d+)")
|
||||
|
||||
|
||||
def numeric_part(s):
|
||||
"""Returns the leading numeric part of a string.
|
||||
|
||||
>>> numeric_part("20-alpha")
|
||||
20
|
||||
>>> numeric_part("foo")
|
||||
>>> numeric_part("16b")
|
||||
16
|
||||
"""
|
||||
|
||||
m = re_numeric_part.match(s)
|
||||
if m:
|
||||
return int(m.group(1))
|
||||
return None
|
||||
|
||||
|
||||
class Connection(_mysql.connection):
|
||||
"""MySQL Database Connection Object"""
|
||||
|
||||
default_cursor = cursors.Cursor
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""
|
||||
Create a connection to the database. It is strongly recommended
|
||||
that you only use keyword parameters. Consult the MySQL C API
|
||||
documentation for more information.
|
||||
|
||||
:param str host: host to connect
|
||||
:param str user: user to connect as
|
||||
:param str password: password to use
|
||||
:param str passwd: alias of password (deprecated)
|
||||
:param str database: database to use
|
||||
:param str db: alias of database (deprecated)
|
||||
:param int port: TCP/IP port to connect to
|
||||
:param str unix_socket: location of unix_socket to use
|
||||
:param dict conv: conversion dictionary, see MySQLdb.converters
|
||||
:param int connect_timeout:
|
||||
number of seconds to wait before the connection attempt fails.
|
||||
|
||||
:param bool compress: if set, compression is enabled
|
||||
:param str named_pipe: if set, a named pipe is used to connect (Windows only)
|
||||
:param str init_command:
|
||||
command which is run once the connection is created
|
||||
|
||||
:param str read_default_file:
|
||||
file from which default client values are read
|
||||
|
||||
:param str read_default_group:
|
||||
configuration group to use from the default file
|
||||
|
||||
:param type cursorclass:
|
||||
class object, used to create cursors (keyword only)
|
||||
|
||||
:param bool use_unicode:
|
||||
If True, text-like columns are returned as unicode objects
|
||||
using the connection's character set. Otherwise, text-like
|
||||
columns are returned as bytes. Unicode objects will always
|
||||
be encoded to the connection's character set regardless of
|
||||
this setting.
|
||||
Default to True.
|
||||
|
||||
:param str charset:
|
||||
If supplied, the connection character set will be changed
|
||||
to this character set.
|
||||
|
||||
:param str collation:
|
||||
If ``charset`` and ``collation`` are both supplied, the
|
||||
character set and collation for the current connection
|
||||
will be set.
|
||||
|
||||
If omitted, empty string, or None, the default collation
|
||||
for the ``charset`` is implied.
|
||||
|
||||
:param str auth_plugin:
|
||||
If supplied, the connection default authentication plugin will be
|
||||
changed to this value. Example values:
|
||||
`mysql_native_password` or `caching_sha2_password`
|
||||
|
||||
:param str sql_mode:
|
||||
If supplied, the session SQL mode will be changed to this
|
||||
setting.
|
||||
For more details and legal values, see the MySQL documentation.
|
||||
|
||||
:param int client_flag:
|
||||
flags to use or 0 (see MySQL docs or constants/CLIENTS.py)
|
||||
|
||||
:param bool multi_statements:
|
||||
If True, enable multi statements for clients >= 4.1.
|
||||
Defaults to True.
|
||||
|
||||
:param str ssl_mode:
|
||||
specify the security settings for connection to the server;
|
||||
see the MySQL documentation for more details
|
||||
(mysql_option(), MYSQL_OPT_SSL_MODE).
|
||||
Only one of 'DISABLED', 'PREFERRED', 'REQUIRED',
|
||||
'VERIFY_CA', 'VERIFY_IDENTITY' can be specified.
|
||||
|
||||
:param dict ssl:
|
||||
dictionary or mapping contains SSL connection parameters;
|
||||
see the MySQL documentation for more details
|
||||
(mysql_ssl_set()). If this is set, and the client does not
|
||||
support SSL, NotSupportedError will be raised.
|
||||
Since mysqlclient 2.2.4, ssl=True is alias of ssl_mode=REQUIRED
|
||||
for better compatibility with PyMySQL and MariaDB.
|
||||
|
||||
:param str server_public_key_path:
|
||||
specify the path to a file RSA public key file for caching_sha2_password.
|
||||
See https://dev.mysql.com/doc/refman/9.0/en/caching-sha2-pluggable-authentication.html
|
||||
|
||||
:param bool local_infile:
|
||||
enables LOAD LOCAL INFILE; zero disables
|
||||
|
||||
:param bool autocommit:
|
||||
If False (default), autocommit is disabled.
|
||||
If True, autocommit is enabled.
|
||||
If None, autocommit isn't set and server default is used.
|
||||
|
||||
:param bool binary_prefix:
|
||||
If set, the '_binary' prefix will be used for raw byte query
|
||||
arguments (e.g. Binary). This is disabled by default.
|
||||
|
||||
There are a number of undocumented, non-standard methods. See the
|
||||
documentation for the MySQL C API for some hints on what they do.
|
||||
"""
|
||||
from MySQLdb.constants import CLIENT, FIELD_TYPE
|
||||
from MySQLdb.converters import conversions, _bytes_or_str
|
||||
|
||||
kwargs2 = kwargs.copy()
|
||||
|
||||
if "db" in kwargs2:
|
||||
kwargs2["database"] = kwargs2.pop("db")
|
||||
if "passwd" in kwargs2:
|
||||
kwargs2["password"] = kwargs2.pop("passwd")
|
||||
|
||||
if "conv" in kwargs:
|
||||
conv = kwargs["conv"]
|
||||
else:
|
||||
conv = conversions
|
||||
|
||||
conv2 = {}
|
||||
for k, v in conv.items():
|
||||
if isinstance(k, int) and isinstance(v, list):
|
||||
conv2[k] = v[:]
|
||||
else:
|
||||
conv2[k] = v
|
||||
kwargs2["conv"] = conv2
|
||||
|
||||
cursorclass = kwargs2.pop("cursorclass", self.default_cursor)
|
||||
charset = kwargs2.get("charset", "")
|
||||
collation = kwargs2.pop("collation", "")
|
||||
use_unicode = kwargs2.pop("use_unicode", True)
|
||||
sql_mode = kwargs2.pop("sql_mode", "")
|
||||
self._binary_prefix = kwargs2.pop("binary_prefix", False)
|
||||
|
||||
client_flag = kwargs.get("client_flag", 0)
|
||||
client_flag |= CLIENT.MULTI_RESULTS
|
||||
multi_statements = kwargs2.pop("multi_statements", True)
|
||||
if multi_statements:
|
||||
client_flag |= CLIENT.MULTI_STATEMENTS
|
||||
kwargs2["client_flag"] = client_flag
|
||||
|
||||
# PEP-249 requires autocommit to be initially off
|
||||
autocommit = kwargs2.pop("autocommit", False)
|
||||
|
||||
self._set_attributes(*args, **kwargs2)
|
||||
super().__init__(*args, **kwargs2)
|
||||
|
||||
self.cursorclass = cursorclass
|
||||
self.encoders = {
|
||||
k: v
|
||||
for k, v in conv.items()
|
||||
if type(k) is not int # noqa: E721
|
||||
}
|
||||
self._server_version = tuple(
|
||||
[numeric_part(n) for n in self.get_server_info().split(".")[:2]]
|
||||
)
|
||||
self.encoding = "ascii" # overridden in set_character_set()
|
||||
|
||||
if not charset:
|
||||
charset = self.character_set_name()
|
||||
self.set_character_set(charset, collation)
|
||||
|
||||
if sql_mode:
|
||||
self.set_sql_mode(sql_mode)
|
||||
|
||||
if use_unicode:
|
||||
for t in (
|
||||
FIELD_TYPE.STRING,
|
||||
FIELD_TYPE.VAR_STRING,
|
||||
FIELD_TYPE.VARCHAR,
|
||||
FIELD_TYPE.TINY_BLOB,
|
||||
FIELD_TYPE.MEDIUM_BLOB,
|
||||
FIELD_TYPE.LONG_BLOB,
|
||||
FIELD_TYPE.BLOB,
|
||||
):
|
||||
self.converter[t] = _bytes_or_str
|
||||
# Unlike other string/blob types, JSON is always text.
|
||||
# MySQL may return JSON with charset==binary.
|
||||
self.converter[FIELD_TYPE.JSON] = str
|
||||
|
||||
self._transactional = self.server_capabilities & CLIENT.TRANSACTIONS
|
||||
if self._transactional:
|
||||
if autocommit is not None:
|
||||
self.autocommit(autocommit)
|
||||
self.messages = []
|
||||
|
||||
def _set_attributes(self, host=None, user=None, password=None, database="", port=3306,
|
||||
unix_socket=None, **kwargs):
|
||||
"""set some attributes for otel"""
|
||||
if unix_socket and not host:
|
||||
host = "localhost"
|
||||
# support opentelemetry-instrumentation-dbapi
|
||||
self.host = host
|
||||
# _mysql.Connection provides self.port
|
||||
self.user = user
|
||||
self.database = database
|
||||
# otel-inst-mysqlclient uses db instead of database.
|
||||
self.db = database
|
||||
# NOTE: We have not supported semantic conventions yet.
|
||||
# https://opentelemetry.io/docs/specs/semconv/database/sql/
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
self.close()
|
||||
|
||||
def autocommit(self, on):
|
||||
on = bool(on)
|
||||
if self.get_autocommit() != on:
|
||||
_mysql.connection.autocommit(self, on)
|
||||
|
||||
def cursor(self, cursorclass=None):
|
||||
"""
|
||||
Create a cursor on which queries may be performed. The
|
||||
optional cursorclass parameter is used to create the
|
||||
Cursor. By default, self.cursorclass=cursors.Cursor is
|
||||
used.
|
||||
"""
|
||||
return (cursorclass or self.cursorclass)(self)
|
||||
|
||||
def query(self, query):
|
||||
# Since _mysql releases GIL while querying, we need immutable buffer.
|
||||
if isinstance(query, bytearray):
|
||||
query = bytes(query)
|
||||
_mysql.connection.query(self, query)
|
||||
|
||||
def _bytes_literal(self, bs):
|
||||
assert isinstance(bs, (bytes, bytearray))
|
||||
x = self.string_literal(bs) # x is escaped and quoted bytes
|
||||
if self._binary_prefix:
|
||||
return b"_binary" + x
|
||||
return x
|
||||
|
||||
def _tuple_literal(self, t):
|
||||
return b"(%s)" % (b",".join(map(self.literal, t)))
|
||||
|
||||
def literal(self, o):
|
||||
"""If o is a single object, returns an SQL literal as a string.
|
||||
If o is a non-string sequence, the items of the sequence are
|
||||
converted and returned as a sequence.
|
||||
|
||||
Non-standard. For internal use; do not use this in your
|
||||
applications.
|
||||
"""
|
||||
if isinstance(o, str):
|
||||
s = self.string_literal(o.encode(self.encoding))
|
||||
elif isinstance(o, bytearray):
|
||||
s = self._bytes_literal(o)
|
||||
elif isinstance(o, bytes):
|
||||
s = self._bytes_literal(o)
|
||||
elif isinstance(o, (tuple, list)):
|
||||
s = self._tuple_literal(o)
|
||||
else:
|
||||
s = self.escape(o, self.encoders)
|
||||
if isinstance(s, str):
|
||||
s = s.encode(self.encoding)
|
||||
assert isinstance(s, bytes)
|
||||
return s
|
||||
|
||||
def begin(self):
|
||||
"""Explicitly begin a connection.
|
||||
|
||||
This method is not used when autocommit=False (default).
|
||||
"""
|
||||
self.query(b"BEGIN")
|
||||
|
||||
def set_character_set(self, charset, collation=None):
|
||||
"""Set the connection character set to charset."""
|
||||
super().set_character_set(charset)
|
||||
self.encoding = _charset_to_encoding.get(charset, charset)
|
||||
if collation:
|
||||
self.query(f"SET NAMES {charset} COLLATE {collation}")
|
||||
self.store_result()
|
||||
|
||||
def set_sql_mode(self, sql_mode):
|
||||
"""Set the connection sql_mode. See MySQL documentation for
|
||||
legal values."""
|
||||
if self._server_version < (4, 1):
|
||||
raise NotSupportedError("server is too old to set sql_mode")
|
||||
self.query("SET SESSION sql_mode='%s'" % sql_mode)
|
||||
self.store_result()
|
||||
|
||||
def show_warnings(self):
|
||||
"""Return detailed information about warnings as a
|
||||
sequence of tuples of (Level, Code, Message). This
|
||||
is only supported in MySQL-4.1 and up. If your server
|
||||
is an earlier version, an empty sequence is returned."""
|
||||
if self._server_version < (4, 1):
|
||||
return ()
|
||||
self.query("SHOW WARNINGS")
|
||||
r = self.store_result()
|
||||
warnings = r.fetch_row(0)
|
||||
return warnings
|
||||
|
||||
Warning = Warning
|
||||
Error = Error
|
||||
InterfaceError = InterfaceError
|
||||
DatabaseError = DatabaseError
|
||||
DataError = DataError
|
||||
OperationalError = OperationalError
|
||||
IntegrityError = IntegrityError
|
||||
InternalError = InternalError
|
||||
ProgrammingError = ProgrammingError
|
||||
NotSupportedError = NotSupportedError
|
||||
|
||||
|
||||
# vim: colorcolumn=100
|
||||
@ -0,0 +1,27 @@
|
||||
"""MySQL CLIENT constants
|
||||
|
||||
These constants are used when creating the connection. Use bitwise-OR
|
||||
(|) to combine options together, and pass them as the client_flags
|
||||
parameter to MySQLdb.Connection. For more information on these flags,
|
||||
see the MySQL C API documentation for mysql_real_connect().
|
||||
|
||||
"""
|
||||
|
||||
LONG_PASSWORD = 1
|
||||
FOUND_ROWS = 2
|
||||
LONG_FLAG = 4
|
||||
CONNECT_WITH_DB = 8
|
||||
NO_SCHEMA = 16
|
||||
COMPRESS = 32
|
||||
ODBC = 64
|
||||
LOCAL_FILES = 128
|
||||
IGNORE_SPACE = 256
|
||||
CHANGE_USER = 512
|
||||
INTERACTIVE = 1024
|
||||
SSL = 2048
|
||||
IGNORE_SIGPIPE = 4096
|
||||
TRANSACTIONS = 8192 # mysql_com.h was WRONG prior to 3.23.35
|
||||
RESERVED = 16384
|
||||
SECURE_CONNECTION = 32768
|
||||
MULTI_STATEMENTS = 65536
|
||||
MULTI_RESULTS = 131072
|
||||
105
venv/lib/python3.11/site-packages/MySQLdb/constants/CR.py
Normal file
105
venv/lib/python3.11/site-packages/MySQLdb/constants/CR.py
Normal file
@ -0,0 +1,105 @@
|
||||
"""MySQL Connection Errors
|
||||
|
||||
Nearly all of these raise OperationalError. COMMANDS_OUT_OF_SYNC
|
||||
raises ProgrammingError.
|
||||
|
||||
"""
|
||||
|
||||
if __name__ == "__main__":
|
||||
"""
|
||||
Usage: python CR.py [/path/to/mysql/errmsg.h ...] >> CR.py
|
||||
"""
|
||||
import fileinput
|
||||
import re
|
||||
|
||||
data = {}
|
||||
error_last = None
|
||||
for line in fileinput.input():
|
||||
line = re.sub(r"/\*.*?\*/", "", line)
|
||||
m = re.match(r"^\s*#define\s+CR_([A-Z0-9_]+)\s+(\d+)(\s.*|$)", line)
|
||||
if m:
|
||||
name = m.group(1)
|
||||
value = int(m.group(2))
|
||||
if name == "ERROR_LAST":
|
||||
if error_last is None or error_last < value:
|
||||
error_last = value
|
||||
continue
|
||||
if value not in data:
|
||||
data[value] = set()
|
||||
data[value].add(name)
|
||||
for value, names in sorted(data.items()):
|
||||
for name in sorted(names):
|
||||
print(f"{name} = {value}")
|
||||
if error_last is not None:
|
||||
print("ERROR_LAST = %s" % error_last)
|
||||
|
||||
|
||||
ERROR_FIRST = 2000
|
||||
MIN_ERROR = 2000
|
||||
UNKNOWN_ERROR = 2000
|
||||
SOCKET_CREATE_ERROR = 2001
|
||||
CONNECTION_ERROR = 2002
|
||||
CONN_HOST_ERROR = 2003
|
||||
IPSOCK_ERROR = 2004
|
||||
UNKNOWN_HOST = 2005
|
||||
SERVER_GONE_ERROR = 2006
|
||||
VERSION_ERROR = 2007
|
||||
OUT_OF_MEMORY = 2008
|
||||
WRONG_HOST_INFO = 2009
|
||||
LOCALHOST_CONNECTION = 2010
|
||||
TCP_CONNECTION = 2011
|
||||
SERVER_HANDSHAKE_ERR = 2012
|
||||
SERVER_LOST = 2013
|
||||
COMMANDS_OUT_OF_SYNC = 2014
|
||||
NAMEDPIPE_CONNECTION = 2015
|
||||
NAMEDPIPEWAIT_ERROR = 2016
|
||||
NAMEDPIPEOPEN_ERROR = 2017
|
||||
NAMEDPIPESETSTATE_ERROR = 2018
|
||||
CANT_READ_CHARSET = 2019
|
||||
NET_PACKET_TOO_LARGE = 2020
|
||||
EMBEDDED_CONNECTION = 2021
|
||||
PROBE_SLAVE_STATUS = 2022
|
||||
PROBE_SLAVE_HOSTS = 2023
|
||||
PROBE_SLAVE_CONNECT = 2024
|
||||
PROBE_MASTER_CONNECT = 2025
|
||||
SSL_CONNECTION_ERROR = 2026
|
||||
MALFORMED_PACKET = 2027
|
||||
WRONG_LICENSE = 2028
|
||||
NULL_POINTER = 2029
|
||||
NO_PREPARE_STMT = 2030
|
||||
PARAMS_NOT_BOUND = 2031
|
||||
DATA_TRUNCATED = 2032
|
||||
NO_PARAMETERS_EXISTS = 2033
|
||||
INVALID_PARAMETER_NO = 2034
|
||||
INVALID_BUFFER_USE = 2035
|
||||
UNSUPPORTED_PARAM_TYPE = 2036
|
||||
SHARED_MEMORY_CONNECTION = 2037
|
||||
SHARED_MEMORY_CONNECT_REQUEST_ERROR = 2038
|
||||
SHARED_MEMORY_CONNECT_ANSWER_ERROR = 2039
|
||||
SHARED_MEMORY_CONNECT_FILE_MAP_ERROR = 2040
|
||||
SHARED_MEMORY_CONNECT_MAP_ERROR = 2041
|
||||
SHARED_MEMORY_FILE_MAP_ERROR = 2042
|
||||
SHARED_MEMORY_MAP_ERROR = 2043
|
||||
SHARED_MEMORY_EVENT_ERROR = 2044
|
||||
SHARED_MEMORY_CONNECT_ABANDONED_ERROR = 2045
|
||||
SHARED_MEMORY_CONNECT_SET_ERROR = 2046
|
||||
CONN_UNKNOW_PROTOCOL = 2047
|
||||
INVALID_CONN_HANDLE = 2048
|
||||
UNUSED_1 = 2049
|
||||
FETCH_CANCELED = 2050
|
||||
NO_DATA = 2051
|
||||
NO_STMT_METADATA = 2052
|
||||
NO_RESULT_SET = 2053
|
||||
NOT_IMPLEMENTED = 2054
|
||||
SERVER_LOST_EXTENDED = 2055
|
||||
STMT_CLOSED = 2056
|
||||
NEW_STMT_METADATA = 2057
|
||||
ALREADY_CONNECTED = 2058
|
||||
AUTH_PLUGIN_CANNOT_LOAD = 2059
|
||||
DUPLICATE_CONNECTION_ATTR = 2060
|
||||
AUTH_PLUGIN_ERR = 2061
|
||||
INSECURE_API_ERR = 2062
|
||||
FILE_NAME_TOO_LONG = 2063
|
||||
SSL_FIPS_MODE_ERR = 2064
|
||||
MAX_ERROR = 2999
|
||||
ERROR_LAST = 2064
|
||||
827
venv/lib/python3.11/site-packages/MySQLdb/constants/ER.py
Normal file
827
venv/lib/python3.11/site-packages/MySQLdb/constants/ER.py
Normal file
@ -0,0 +1,827 @@
|
||||
"""MySQL ER Constants
|
||||
|
||||
These constants are error codes for the bulk of the error conditions
|
||||
that may occur.
|
||||
"""
|
||||
|
||||
if __name__ == "__main__":
|
||||
"""
|
||||
Usage: python ER.py [/path/to/mysql/mysqld_error.h ...] >> ER.py
|
||||
"""
|
||||
import fileinput
|
||||
import re
|
||||
|
||||
data = {}
|
||||
error_last = None
|
||||
for line in fileinput.input():
|
||||
line = re.sub(r"/\*.*?\*/", "", line)
|
||||
m = re.match(r"^\s*#define\s+((ER|WARN)_[A-Z0-9_]+)\s+(\d+)\s*", line)
|
||||
if m:
|
||||
name = m.group(1)
|
||||
if name.startswith("ER_"):
|
||||
name = name[3:]
|
||||
value = int(m.group(3))
|
||||
if name == "ERROR_LAST":
|
||||
if error_last is None or error_last < value:
|
||||
error_last = value
|
||||
continue
|
||||
if value not in data:
|
||||
data[value] = set()
|
||||
data[value].add(name)
|
||||
for value, names in sorted(data.items()):
|
||||
for name in sorted(names):
|
||||
print(f"{name} = {value}")
|
||||
if error_last is not None:
|
||||
print("ERROR_LAST = %s" % error_last)
|
||||
|
||||
|
||||
ERROR_FIRST = 1000
|
||||
NO = 1002
|
||||
YES = 1003
|
||||
CANT_CREATE_FILE = 1004
|
||||
CANT_CREATE_TABLE = 1005
|
||||
CANT_CREATE_DB = 1006
|
||||
DB_CREATE_EXISTS = 1007
|
||||
DB_DROP_EXISTS = 1008
|
||||
DB_DROP_RMDIR = 1010
|
||||
CANT_FIND_SYSTEM_REC = 1012
|
||||
CANT_GET_STAT = 1013
|
||||
CANT_LOCK = 1015
|
||||
CANT_OPEN_FILE = 1016
|
||||
FILE_NOT_FOUND = 1017
|
||||
CANT_READ_DIR = 1018
|
||||
CHECKREAD = 1020
|
||||
DUP_KEY = 1022
|
||||
ERROR_ON_READ = 1024
|
||||
ERROR_ON_RENAME = 1025
|
||||
ERROR_ON_WRITE = 1026
|
||||
FILE_USED = 1027
|
||||
FILSORT_ABORT = 1028
|
||||
GET_ERRNO = 1030
|
||||
ILLEGAL_HA = 1031
|
||||
KEY_NOT_FOUND = 1032
|
||||
NOT_FORM_FILE = 1033
|
||||
NOT_KEYFILE = 1034
|
||||
OLD_KEYFILE = 1035
|
||||
OPEN_AS_READONLY = 1036
|
||||
OUTOFMEMORY = 1037
|
||||
OUT_OF_SORTMEMORY = 1038
|
||||
CON_COUNT_ERROR = 1040
|
||||
OUT_OF_RESOURCES = 1041
|
||||
BAD_HOST_ERROR = 1042
|
||||
HANDSHAKE_ERROR = 1043
|
||||
DBACCESS_DENIED_ERROR = 1044
|
||||
ACCESS_DENIED_ERROR = 1045
|
||||
NO_DB_ERROR = 1046
|
||||
UNKNOWN_COM_ERROR = 1047
|
||||
BAD_NULL_ERROR = 1048
|
||||
BAD_DB_ERROR = 1049
|
||||
TABLE_EXISTS_ERROR = 1050
|
||||
BAD_TABLE_ERROR = 1051
|
||||
NON_UNIQ_ERROR = 1052
|
||||
SERVER_SHUTDOWN = 1053
|
||||
BAD_FIELD_ERROR = 1054
|
||||
WRONG_FIELD_WITH_GROUP = 1055
|
||||
WRONG_GROUP_FIELD = 1056
|
||||
WRONG_SUM_SELECT = 1057
|
||||
WRONG_VALUE_COUNT = 1058
|
||||
TOO_LONG_IDENT = 1059
|
||||
DUP_FIELDNAME = 1060
|
||||
DUP_KEYNAME = 1061
|
||||
DUP_ENTRY = 1062
|
||||
WRONG_FIELD_SPEC = 1063
|
||||
PARSE_ERROR = 1064
|
||||
EMPTY_QUERY = 1065
|
||||
NONUNIQ_TABLE = 1066
|
||||
INVALID_DEFAULT = 1067
|
||||
MULTIPLE_PRI_KEY = 1068
|
||||
TOO_MANY_KEYS = 1069
|
||||
TOO_MANY_KEY_PARTS = 1070
|
||||
TOO_LONG_KEY = 1071
|
||||
KEY_COLUMN_DOES_NOT_EXITS = 1072
|
||||
BLOB_USED_AS_KEY = 1073
|
||||
TOO_BIG_FIELDLENGTH = 1074
|
||||
WRONG_AUTO_KEY = 1075
|
||||
READY = 1076
|
||||
SHUTDOWN_COMPLETE = 1079
|
||||
FORCING_CLOSE = 1080
|
||||
IPSOCK_ERROR = 1081
|
||||
NO_SUCH_INDEX = 1082
|
||||
WRONG_FIELD_TERMINATORS = 1083
|
||||
BLOBS_AND_NO_TERMINATED = 1084
|
||||
TEXTFILE_NOT_READABLE = 1085
|
||||
FILE_EXISTS_ERROR = 1086
|
||||
LOAD_INFO = 1087
|
||||
ALTER_INFO = 1088
|
||||
WRONG_SUB_KEY = 1089
|
||||
CANT_REMOVE_ALL_FIELDS = 1090
|
||||
CANT_DROP_FIELD_OR_KEY = 1091
|
||||
INSERT_INFO = 1092
|
||||
UPDATE_TABLE_USED = 1093
|
||||
NO_SUCH_THREAD = 1094
|
||||
KILL_DENIED_ERROR = 1095
|
||||
NO_TABLES_USED = 1096
|
||||
TOO_BIG_SET = 1097
|
||||
NO_UNIQUE_LOGFILE = 1098
|
||||
TABLE_NOT_LOCKED_FOR_WRITE = 1099
|
||||
TABLE_NOT_LOCKED = 1100
|
||||
BLOB_CANT_HAVE_DEFAULT = 1101
|
||||
WRONG_DB_NAME = 1102
|
||||
WRONG_TABLE_NAME = 1103
|
||||
TOO_BIG_SELECT = 1104
|
||||
UNKNOWN_ERROR = 1105
|
||||
UNKNOWN_PROCEDURE = 1106
|
||||
WRONG_PARAMCOUNT_TO_PROCEDURE = 1107
|
||||
WRONG_PARAMETERS_TO_PROCEDURE = 1108
|
||||
UNKNOWN_TABLE = 1109
|
||||
FIELD_SPECIFIED_TWICE = 1110
|
||||
INVALID_GROUP_FUNC_USE = 1111
|
||||
UNSUPPORTED_EXTENSION = 1112
|
||||
TABLE_MUST_HAVE_COLUMNS = 1113
|
||||
RECORD_FILE_FULL = 1114
|
||||
UNKNOWN_CHARACTER_SET = 1115
|
||||
TOO_MANY_TABLES = 1116
|
||||
TOO_MANY_FIELDS = 1117
|
||||
TOO_BIG_ROWSIZE = 1118
|
||||
STACK_OVERRUN = 1119
|
||||
WRONG_OUTER_JOIN_UNUSED = 1120
|
||||
NULL_COLUMN_IN_INDEX = 1121
|
||||
CANT_FIND_UDF = 1122
|
||||
CANT_INITIALIZE_UDF = 1123
|
||||
UDF_NO_PATHS = 1124
|
||||
UDF_EXISTS = 1125
|
||||
CANT_OPEN_LIBRARY = 1126
|
||||
CANT_FIND_DL_ENTRY = 1127
|
||||
FUNCTION_NOT_DEFINED = 1128
|
||||
HOST_IS_BLOCKED = 1129
|
||||
HOST_NOT_PRIVILEGED = 1130
|
||||
PASSWORD_ANONYMOUS_USER = 1131
|
||||
PASSWORD_NOT_ALLOWED = 1132
|
||||
PASSWORD_NO_MATCH = 1133
|
||||
UPDATE_INFO = 1134
|
||||
CANT_CREATE_THREAD = 1135
|
||||
WRONG_VALUE_COUNT_ON_ROW = 1136
|
||||
CANT_REOPEN_TABLE = 1137
|
||||
INVALID_USE_OF_NULL = 1138
|
||||
REGEXP_ERROR = 1139
|
||||
MIX_OF_GROUP_FUNC_AND_FIELDS = 1140
|
||||
NONEXISTING_GRANT = 1141
|
||||
TABLEACCESS_DENIED_ERROR = 1142
|
||||
COLUMNACCESS_DENIED_ERROR = 1143
|
||||
ILLEGAL_GRANT_FOR_TABLE = 1144
|
||||
GRANT_WRONG_HOST_OR_USER = 1145
|
||||
NO_SUCH_TABLE = 1146
|
||||
NONEXISTING_TABLE_GRANT = 1147
|
||||
NOT_ALLOWED_COMMAND = 1148
|
||||
SYNTAX_ERROR = 1149
|
||||
ABORTING_CONNECTION = 1152
|
||||
NET_PACKET_TOO_LARGE = 1153
|
||||
NET_READ_ERROR_FROM_PIPE = 1154
|
||||
NET_FCNTL_ERROR = 1155
|
||||
NET_PACKETS_OUT_OF_ORDER = 1156
|
||||
NET_UNCOMPRESS_ERROR = 1157
|
||||
NET_READ_ERROR = 1158
|
||||
NET_READ_INTERRUPTED = 1159
|
||||
NET_ERROR_ON_WRITE = 1160
|
||||
NET_WRITE_INTERRUPTED = 1161
|
||||
TOO_LONG_STRING = 1162
|
||||
TABLE_CANT_HANDLE_BLOB = 1163
|
||||
TABLE_CANT_HANDLE_AUTO_INCREMENT = 1164
|
||||
WRONG_COLUMN_NAME = 1166
|
||||
WRONG_KEY_COLUMN = 1167
|
||||
WRONG_MRG_TABLE = 1168
|
||||
DUP_UNIQUE = 1169
|
||||
BLOB_KEY_WITHOUT_LENGTH = 1170
|
||||
PRIMARY_CANT_HAVE_NULL = 1171
|
||||
TOO_MANY_ROWS = 1172
|
||||
REQUIRES_PRIMARY_KEY = 1173
|
||||
UPDATE_WITHOUT_KEY_IN_SAFE_MODE = 1175
|
||||
KEY_DOES_NOT_EXITS = 1176
|
||||
CHECK_NO_SUCH_TABLE = 1177
|
||||
CHECK_NOT_IMPLEMENTED = 1178
|
||||
CANT_DO_THIS_DURING_AN_TRANSACTION = 1179
|
||||
ERROR_DURING_COMMIT = 1180
|
||||
ERROR_DURING_ROLLBACK = 1181
|
||||
ERROR_DURING_FLUSH_LOGS = 1182
|
||||
NEW_ABORTING_CONNECTION = 1184
|
||||
MASTER = 1188
|
||||
MASTER_NET_READ = 1189
|
||||
MASTER_NET_WRITE = 1190
|
||||
FT_MATCHING_KEY_NOT_FOUND = 1191
|
||||
LOCK_OR_ACTIVE_TRANSACTION = 1192
|
||||
UNKNOWN_SYSTEM_VARIABLE = 1193
|
||||
CRASHED_ON_USAGE = 1194
|
||||
CRASHED_ON_REPAIR = 1195
|
||||
WARNING_NOT_COMPLETE_ROLLBACK = 1196
|
||||
TRANS_CACHE_FULL = 1197
|
||||
SLAVE_NOT_RUNNING = 1199
|
||||
BAD_SLAVE = 1200
|
||||
MASTER_INFO = 1201
|
||||
SLAVE_THREAD = 1202
|
||||
TOO_MANY_USER_CONNECTIONS = 1203
|
||||
SET_CONSTANTS_ONLY = 1204
|
||||
LOCK_WAIT_TIMEOUT = 1205
|
||||
LOCK_TABLE_FULL = 1206
|
||||
READ_ONLY_TRANSACTION = 1207
|
||||
WRONG_ARGUMENTS = 1210
|
||||
NO_PERMISSION_TO_CREATE_USER = 1211
|
||||
LOCK_DEADLOCK = 1213
|
||||
TABLE_CANT_HANDLE_FT = 1214
|
||||
CANNOT_ADD_FOREIGN = 1215
|
||||
NO_REFERENCED_ROW = 1216
|
||||
ROW_IS_REFERENCED = 1217
|
||||
CONNECT_TO_MASTER = 1218
|
||||
ERROR_WHEN_EXECUTING_COMMAND = 1220
|
||||
WRONG_USAGE = 1221
|
||||
WRONG_NUMBER_OF_COLUMNS_IN_SELECT = 1222
|
||||
CANT_UPDATE_WITH_READLOCK = 1223
|
||||
MIXING_NOT_ALLOWED = 1224
|
||||
DUP_ARGUMENT = 1225
|
||||
USER_LIMIT_REACHED = 1226
|
||||
SPECIFIC_ACCESS_DENIED_ERROR = 1227
|
||||
LOCAL_VARIABLE = 1228
|
||||
GLOBAL_VARIABLE = 1229
|
||||
NO_DEFAULT = 1230
|
||||
WRONG_VALUE_FOR_VAR = 1231
|
||||
WRONG_TYPE_FOR_VAR = 1232
|
||||
VAR_CANT_BE_READ = 1233
|
||||
CANT_USE_OPTION_HERE = 1234
|
||||
NOT_SUPPORTED_YET = 1235
|
||||
MASTER_FATAL_ERROR_READING_BINLOG = 1236
|
||||
SLAVE_IGNORED_TABLE = 1237
|
||||
INCORRECT_GLOBAL_LOCAL_VAR = 1238
|
||||
WRONG_FK_DEF = 1239
|
||||
KEY_REF_DO_NOT_MATCH_TABLE_REF = 1240
|
||||
OPERAND_COLUMNS = 1241
|
||||
SUBQUERY_NO_1_ROW = 1242
|
||||
UNKNOWN_STMT_HANDLER = 1243
|
||||
CORRUPT_HELP_DB = 1244
|
||||
AUTO_CONVERT = 1246
|
||||
ILLEGAL_REFERENCE = 1247
|
||||
DERIVED_MUST_HAVE_ALIAS = 1248
|
||||
SELECT_REDUCED = 1249
|
||||
TABLENAME_NOT_ALLOWED_HERE = 1250
|
||||
NOT_SUPPORTED_AUTH_MODE = 1251
|
||||
SPATIAL_CANT_HAVE_NULL = 1252
|
||||
COLLATION_CHARSET_MISMATCH = 1253
|
||||
TOO_BIG_FOR_UNCOMPRESS = 1256
|
||||
ZLIB_Z_MEM_ERROR = 1257
|
||||
ZLIB_Z_BUF_ERROR = 1258
|
||||
ZLIB_Z_DATA_ERROR = 1259
|
||||
CUT_VALUE_GROUP_CONCAT = 1260
|
||||
WARN_TOO_FEW_RECORDS = 1261
|
||||
WARN_TOO_MANY_RECORDS = 1262
|
||||
WARN_NULL_TO_NOTNULL = 1263
|
||||
WARN_DATA_OUT_OF_RANGE = 1264
|
||||
WARN_DATA_TRUNCATED = 1265
|
||||
WARN_USING_OTHER_HANDLER = 1266
|
||||
CANT_AGGREGATE_2COLLATIONS = 1267
|
||||
REVOKE_GRANTS = 1269
|
||||
CANT_AGGREGATE_3COLLATIONS = 1270
|
||||
CANT_AGGREGATE_NCOLLATIONS = 1271
|
||||
VARIABLE_IS_NOT_STRUCT = 1272
|
||||
UNKNOWN_COLLATION = 1273
|
||||
SLAVE_IGNORED_SSL_PARAMS = 1274
|
||||
SERVER_IS_IN_SECURE_AUTH_MODE = 1275
|
||||
WARN_FIELD_RESOLVED = 1276
|
||||
BAD_SLAVE_UNTIL_COND = 1277
|
||||
MISSING_SKIP_SLAVE = 1278
|
||||
UNTIL_COND_IGNORED = 1279
|
||||
WRONG_NAME_FOR_INDEX = 1280
|
||||
WRONG_NAME_FOR_CATALOG = 1281
|
||||
BAD_FT_COLUMN = 1283
|
||||
UNKNOWN_KEY_CACHE = 1284
|
||||
WARN_HOSTNAME_WONT_WORK = 1285
|
||||
UNKNOWN_STORAGE_ENGINE = 1286
|
||||
WARN_DEPRECATED_SYNTAX = 1287
|
||||
NON_UPDATABLE_TABLE = 1288
|
||||
FEATURE_DISABLED = 1289
|
||||
OPTION_PREVENTS_STATEMENT = 1290
|
||||
DUPLICATED_VALUE_IN_TYPE = 1291
|
||||
TRUNCATED_WRONG_VALUE = 1292
|
||||
INVALID_ON_UPDATE = 1294
|
||||
UNSUPPORTED_PS = 1295
|
||||
GET_ERRMSG = 1296
|
||||
GET_TEMPORARY_ERRMSG = 1297
|
||||
UNKNOWN_TIME_ZONE = 1298
|
||||
WARN_INVALID_TIMESTAMP = 1299
|
||||
INVALID_CHARACTER_STRING = 1300
|
||||
WARN_ALLOWED_PACKET_OVERFLOWED = 1301
|
||||
CONFLICTING_DECLARATIONS = 1302
|
||||
SP_NO_RECURSIVE_CREATE = 1303
|
||||
SP_ALREADY_EXISTS = 1304
|
||||
SP_DOES_NOT_EXIST = 1305
|
||||
SP_DROP_FAILED = 1306
|
||||
SP_STORE_FAILED = 1307
|
||||
SP_LILABEL_MISMATCH = 1308
|
||||
SP_LABEL_REDEFINE = 1309
|
||||
SP_LABEL_MISMATCH = 1310
|
||||
SP_UNINIT_VAR = 1311
|
||||
SP_BADSELECT = 1312
|
||||
SP_BADRETURN = 1313
|
||||
SP_BADSTATEMENT = 1314
|
||||
UPDATE_LOG_DEPRECATED_IGNORED = 1315
|
||||
UPDATE_LOG_DEPRECATED_TRANSLATED = 1316
|
||||
QUERY_INTERRUPTED = 1317
|
||||
SP_WRONG_NO_OF_ARGS = 1318
|
||||
SP_COND_MISMATCH = 1319
|
||||
SP_NORETURN = 1320
|
||||
SP_NORETURNEND = 1321
|
||||
SP_BAD_CURSOR_QUERY = 1322
|
||||
SP_BAD_CURSOR_SELECT = 1323
|
||||
SP_CURSOR_MISMATCH = 1324
|
||||
SP_CURSOR_ALREADY_OPEN = 1325
|
||||
SP_CURSOR_NOT_OPEN = 1326
|
||||
SP_UNDECLARED_VAR = 1327
|
||||
SP_WRONG_NO_OF_FETCH_ARGS = 1328
|
||||
SP_FETCH_NO_DATA = 1329
|
||||
SP_DUP_PARAM = 1330
|
||||
SP_DUP_VAR = 1331
|
||||
SP_DUP_COND = 1332
|
||||
SP_DUP_CURS = 1333
|
||||
SP_CANT_ALTER = 1334
|
||||
SP_SUBSELECT_NYI = 1335
|
||||
STMT_NOT_ALLOWED_IN_SF_OR_TRG = 1336
|
||||
SP_VARCOND_AFTER_CURSHNDLR = 1337
|
||||
SP_CURSOR_AFTER_HANDLER = 1338
|
||||
SP_CASE_NOT_FOUND = 1339
|
||||
FPARSER_TOO_BIG_FILE = 1340
|
||||
FPARSER_BAD_HEADER = 1341
|
||||
FPARSER_EOF_IN_COMMENT = 1342
|
||||
FPARSER_ERROR_IN_PARAMETER = 1343
|
||||
FPARSER_EOF_IN_UNKNOWN_PARAMETER = 1344
|
||||
VIEW_NO_EXPLAIN = 1345
|
||||
WRONG_OBJECT = 1347
|
||||
NONUPDATEABLE_COLUMN = 1348
|
||||
VIEW_SELECT_CLAUSE = 1350
|
||||
VIEW_SELECT_VARIABLE = 1351
|
||||
VIEW_SELECT_TMPTABLE = 1352
|
||||
VIEW_WRONG_LIST = 1353
|
||||
WARN_VIEW_MERGE = 1354
|
||||
WARN_VIEW_WITHOUT_KEY = 1355
|
||||
VIEW_INVALID = 1356
|
||||
SP_NO_DROP_SP = 1357
|
||||
TRG_ALREADY_EXISTS = 1359
|
||||
TRG_DOES_NOT_EXIST = 1360
|
||||
TRG_ON_VIEW_OR_TEMP_TABLE = 1361
|
||||
TRG_CANT_CHANGE_ROW = 1362
|
||||
TRG_NO_SUCH_ROW_IN_TRG = 1363
|
||||
NO_DEFAULT_FOR_FIELD = 1364
|
||||
DIVISION_BY_ZERO = 1365
|
||||
TRUNCATED_WRONG_VALUE_FOR_FIELD = 1366
|
||||
ILLEGAL_VALUE_FOR_TYPE = 1367
|
||||
VIEW_NONUPD_CHECK = 1368
|
||||
VIEW_CHECK_FAILED = 1369
|
||||
PROCACCESS_DENIED_ERROR = 1370
|
||||
RELAY_LOG_FAIL = 1371
|
||||
UNKNOWN_TARGET_BINLOG = 1373
|
||||
IO_ERR_LOG_INDEX_READ = 1374
|
||||
BINLOG_PURGE_PROHIBITED = 1375
|
||||
FSEEK_FAIL = 1376
|
||||
BINLOG_PURGE_FATAL_ERR = 1377
|
||||
LOG_IN_USE = 1378
|
||||
LOG_PURGE_UNKNOWN_ERR = 1379
|
||||
RELAY_LOG_INIT = 1380
|
||||
NO_BINARY_LOGGING = 1381
|
||||
RESERVED_SYNTAX = 1382
|
||||
PS_MANY_PARAM = 1390
|
||||
KEY_PART_0 = 1391
|
||||
VIEW_CHECKSUM = 1392
|
||||
VIEW_MULTIUPDATE = 1393
|
||||
VIEW_NO_INSERT_FIELD_LIST = 1394
|
||||
VIEW_DELETE_MERGE_VIEW = 1395
|
||||
CANNOT_USER = 1396
|
||||
XAER_NOTA = 1397
|
||||
XAER_INVAL = 1398
|
||||
XAER_RMFAIL = 1399
|
||||
XAER_OUTSIDE = 1400
|
||||
XAER_RMERR = 1401
|
||||
XA_RBROLLBACK = 1402
|
||||
NONEXISTING_PROC_GRANT = 1403
|
||||
PROC_AUTO_GRANT_FAIL = 1404
|
||||
PROC_AUTO_REVOKE_FAIL = 1405
|
||||
DATA_TOO_LONG = 1406
|
||||
SP_BAD_SQLSTATE = 1407
|
||||
STARTUP = 1408
|
||||
LOAD_FROM_FIXED_SIZE_ROWS_TO_VAR = 1409
|
||||
CANT_CREATE_USER_WITH_GRANT = 1410
|
||||
WRONG_VALUE_FOR_TYPE = 1411
|
||||
TABLE_DEF_CHANGED = 1412
|
||||
SP_DUP_HANDLER = 1413
|
||||
SP_NOT_VAR_ARG = 1414
|
||||
SP_NO_RETSET = 1415
|
||||
CANT_CREATE_GEOMETRY_OBJECT = 1416
|
||||
BINLOG_UNSAFE_ROUTINE = 1418
|
||||
BINLOG_CREATE_ROUTINE_NEED_SUPER = 1419
|
||||
STMT_HAS_NO_OPEN_CURSOR = 1421
|
||||
COMMIT_NOT_ALLOWED_IN_SF_OR_TRG = 1422
|
||||
NO_DEFAULT_FOR_VIEW_FIELD = 1423
|
||||
SP_NO_RECURSION = 1424
|
||||
TOO_BIG_SCALE = 1425
|
||||
TOO_BIG_PRECISION = 1426
|
||||
M_BIGGER_THAN_D = 1427
|
||||
WRONG_LOCK_OF_SYSTEM_TABLE = 1428
|
||||
CONNECT_TO_FOREIGN_DATA_SOURCE = 1429
|
||||
QUERY_ON_FOREIGN_DATA_SOURCE = 1430
|
||||
FOREIGN_DATA_SOURCE_DOESNT_EXIST = 1431
|
||||
FOREIGN_DATA_STRING_INVALID_CANT_CREATE = 1432
|
||||
FOREIGN_DATA_STRING_INVALID = 1433
|
||||
TRG_IN_WRONG_SCHEMA = 1435
|
||||
STACK_OVERRUN_NEED_MORE = 1436
|
||||
TOO_LONG_BODY = 1437
|
||||
WARN_CANT_DROP_DEFAULT_KEYCACHE = 1438
|
||||
TOO_BIG_DISPLAYWIDTH = 1439
|
||||
XAER_DUPID = 1440
|
||||
DATETIME_FUNCTION_OVERFLOW = 1441
|
||||
CANT_UPDATE_USED_TABLE_IN_SF_OR_TRG = 1442
|
||||
VIEW_PREVENT_UPDATE = 1443
|
||||
PS_NO_RECURSION = 1444
|
||||
SP_CANT_SET_AUTOCOMMIT = 1445
|
||||
VIEW_FRM_NO_USER = 1447
|
||||
VIEW_OTHER_USER = 1448
|
||||
NO_SUCH_USER = 1449
|
||||
FORBID_SCHEMA_CHANGE = 1450
|
||||
ROW_IS_REFERENCED_2 = 1451
|
||||
NO_REFERENCED_ROW_2 = 1452
|
||||
SP_BAD_VAR_SHADOW = 1453
|
||||
TRG_NO_DEFINER = 1454
|
||||
OLD_FILE_FORMAT = 1455
|
||||
SP_RECURSION_LIMIT = 1456
|
||||
SP_WRONG_NAME = 1458
|
||||
TABLE_NEEDS_UPGRADE = 1459
|
||||
SP_NO_AGGREGATE = 1460
|
||||
MAX_PREPARED_STMT_COUNT_REACHED = 1461
|
||||
VIEW_RECURSIVE = 1462
|
||||
NON_GROUPING_FIELD_USED = 1463
|
||||
TABLE_CANT_HANDLE_SPKEYS = 1464
|
||||
NO_TRIGGERS_ON_SYSTEM_SCHEMA = 1465
|
||||
REMOVED_SPACES = 1466
|
||||
AUTOINC_READ_FAILED = 1467
|
||||
USERNAME = 1468
|
||||
HOSTNAME = 1469
|
||||
WRONG_STRING_LENGTH = 1470
|
||||
NON_INSERTABLE_TABLE = 1471
|
||||
ADMIN_WRONG_MRG_TABLE = 1472
|
||||
TOO_HIGH_LEVEL_OF_NESTING_FOR_SELECT = 1473
|
||||
NAME_BECOMES_EMPTY = 1474
|
||||
AMBIGUOUS_FIELD_TERM = 1475
|
||||
FOREIGN_SERVER_EXISTS = 1476
|
||||
FOREIGN_SERVER_DOESNT_EXIST = 1477
|
||||
ILLEGAL_HA_CREATE_OPTION = 1478
|
||||
PARTITION_REQUIRES_VALUES_ERROR = 1479
|
||||
PARTITION_WRONG_VALUES_ERROR = 1480
|
||||
PARTITION_MAXVALUE_ERROR = 1481
|
||||
PARTITION_WRONG_NO_PART_ERROR = 1484
|
||||
PARTITION_WRONG_NO_SUBPART_ERROR = 1485
|
||||
WRONG_EXPR_IN_PARTITION_FUNC_ERROR = 1486
|
||||
FIELD_NOT_FOUND_PART_ERROR = 1488
|
||||
INCONSISTENT_PARTITION_INFO_ERROR = 1490
|
||||
PARTITION_FUNC_NOT_ALLOWED_ERROR = 1491
|
||||
PARTITIONS_MUST_BE_DEFINED_ERROR = 1492
|
||||
RANGE_NOT_INCREASING_ERROR = 1493
|
||||
INCONSISTENT_TYPE_OF_FUNCTIONS_ERROR = 1494
|
||||
MULTIPLE_DEF_CONST_IN_LIST_PART_ERROR = 1495
|
||||
PARTITION_ENTRY_ERROR = 1496
|
||||
MIX_HANDLER_ERROR = 1497
|
||||
PARTITION_NOT_DEFINED_ERROR = 1498
|
||||
TOO_MANY_PARTITIONS_ERROR = 1499
|
||||
SUBPARTITION_ERROR = 1500
|
||||
CANT_CREATE_HANDLER_FILE = 1501
|
||||
BLOB_FIELD_IN_PART_FUNC_ERROR = 1502
|
||||
UNIQUE_KEY_NEED_ALL_FIELDS_IN_PF = 1503
|
||||
NO_PARTS_ERROR = 1504
|
||||
PARTITION_MGMT_ON_NONPARTITIONED = 1505
|
||||
FOREIGN_KEY_ON_PARTITIONED = 1506
|
||||
DROP_PARTITION_NON_EXISTENT = 1507
|
||||
DROP_LAST_PARTITION = 1508
|
||||
COALESCE_ONLY_ON_HASH_PARTITION = 1509
|
||||
REORG_HASH_ONLY_ON_SAME_NO = 1510
|
||||
REORG_NO_PARAM_ERROR = 1511
|
||||
ONLY_ON_RANGE_LIST_PARTITION = 1512
|
||||
ADD_PARTITION_SUBPART_ERROR = 1513
|
||||
ADD_PARTITION_NO_NEW_PARTITION = 1514
|
||||
COALESCE_PARTITION_NO_PARTITION = 1515
|
||||
REORG_PARTITION_NOT_EXIST = 1516
|
||||
SAME_NAME_PARTITION = 1517
|
||||
NO_BINLOG_ERROR = 1518
|
||||
CONSECUTIVE_REORG_PARTITIONS = 1519
|
||||
REORG_OUTSIDE_RANGE = 1520
|
||||
PARTITION_FUNCTION_FAILURE = 1521
|
||||
LIMITED_PART_RANGE = 1523
|
||||
PLUGIN_IS_NOT_LOADED = 1524
|
||||
WRONG_VALUE = 1525
|
||||
NO_PARTITION_FOR_GIVEN_VALUE = 1526
|
||||
FILEGROUP_OPTION_ONLY_ONCE = 1527
|
||||
CREATE_FILEGROUP_FAILED = 1528
|
||||
DROP_FILEGROUP_FAILED = 1529
|
||||
TABLESPACE_AUTO_EXTEND_ERROR = 1530
|
||||
WRONG_SIZE_NUMBER = 1531
|
||||
SIZE_OVERFLOW_ERROR = 1532
|
||||
ALTER_FILEGROUP_FAILED = 1533
|
||||
BINLOG_ROW_LOGGING_FAILED = 1534
|
||||
EVENT_ALREADY_EXISTS = 1537
|
||||
EVENT_DOES_NOT_EXIST = 1539
|
||||
EVENT_INTERVAL_NOT_POSITIVE_OR_TOO_BIG = 1542
|
||||
EVENT_ENDS_BEFORE_STARTS = 1543
|
||||
EVENT_EXEC_TIME_IN_THE_PAST = 1544
|
||||
EVENT_SAME_NAME = 1551
|
||||
DROP_INDEX_FK = 1553
|
||||
WARN_DEPRECATED_SYNTAX_WITH_VER = 1554
|
||||
CANT_LOCK_LOG_TABLE = 1556
|
||||
FOREIGN_DUPLICATE_KEY_OLD_UNUSED = 1557
|
||||
COL_COUNT_DOESNT_MATCH_PLEASE_UPDATE = 1558
|
||||
TEMP_TABLE_PREVENTS_SWITCH_OUT_OF_RBR = 1559
|
||||
STORED_FUNCTION_PREVENTS_SWITCH_BINLOG_FORMAT = 1560
|
||||
PARTITION_NO_TEMPORARY = 1562
|
||||
PARTITION_CONST_DOMAIN_ERROR = 1563
|
||||
PARTITION_FUNCTION_IS_NOT_ALLOWED = 1564
|
||||
NULL_IN_VALUES_LESS_THAN = 1566
|
||||
WRONG_PARTITION_NAME = 1567
|
||||
CANT_CHANGE_TX_CHARACTERISTICS = 1568
|
||||
DUP_ENTRY_AUTOINCREMENT_CASE = 1569
|
||||
EVENT_SET_VAR_ERROR = 1571
|
||||
PARTITION_MERGE_ERROR = 1572
|
||||
BASE64_DECODE_ERROR = 1575
|
||||
EVENT_RECURSION_FORBIDDEN = 1576
|
||||
ONLY_INTEGERS_ALLOWED = 1578
|
||||
UNSUPORTED_LOG_ENGINE = 1579
|
||||
BAD_LOG_STATEMENT = 1580
|
||||
CANT_RENAME_LOG_TABLE = 1581
|
||||
WRONG_PARAMCOUNT_TO_NATIVE_FCT = 1582
|
||||
WRONG_PARAMETERS_TO_NATIVE_FCT = 1583
|
||||
WRONG_PARAMETERS_TO_STORED_FCT = 1584
|
||||
NATIVE_FCT_NAME_COLLISION = 1585
|
||||
DUP_ENTRY_WITH_KEY_NAME = 1586
|
||||
BINLOG_PURGE_EMFILE = 1587
|
||||
EVENT_CANNOT_CREATE_IN_THE_PAST = 1588
|
||||
EVENT_CANNOT_ALTER_IN_THE_PAST = 1589
|
||||
NO_PARTITION_FOR_GIVEN_VALUE_SILENT = 1591
|
||||
BINLOG_UNSAFE_STATEMENT = 1592
|
||||
BINLOG_FATAL_ERROR = 1593
|
||||
BINLOG_LOGGING_IMPOSSIBLE = 1598
|
||||
VIEW_NO_CREATION_CTX = 1599
|
||||
VIEW_INVALID_CREATION_CTX = 1600
|
||||
TRG_CORRUPTED_FILE = 1602
|
||||
TRG_NO_CREATION_CTX = 1603
|
||||
TRG_INVALID_CREATION_CTX = 1604
|
||||
EVENT_INVALID_CREATION_CTX = 1605
|
||||
TRG_CANT_OPEN_TABLE = 1606
|
||||
NO_FORMAT_DESCRIPTION_EVENT_BEFORE_BINLOG_STATEMENT = 1609
|
||||
SLAVE_CORRUPT_EVENT = 1610
|
||||
LOG_PURGE_NO_FILE = 1612
|
||||
XA_RBTIMEOUT = 1613
|
||||
XA_RBDEADLOCK = 1614
|
||||
NEED_REPREPARE = 1615
|
||||
WARN_NO_MASTER_INFO = 1617
|
||||
WARN_OPTION_IGNORED = 1618
|
||||
PLUGIN_DELETE_BUILTIN = 1619
|
||||
WARN_PLUGIN_BUSY = 1620
|
||||
VARIABLE_IS_READONLY = 1621
|
||||
WARN_ENGINE_TRANSACTION_ROLLBACK = 1622
|
||||
SLAVE_HEARTBEAT_VALUE_OUT_OF_RANGE = 1624
|
||||
NDB_REPLICATION_SCHEMA_ERROR = 1625
|
||||
CONFLICT_FN_PARSE_ERROR = 1626
|
||||
EXCEPTIONS_WRITE_ERROR = 1627
|
||||
TOO_LONG_TABLE_COMMENT = 1628
|
||||
TOO_LONG_FIELD_COMMENT = 1629
|
||||
FUNC_INEXISTENT_NAME_COLLISION = 1630
|
||||
DATABASE_NAME = 1631
|
||||
TABLE_NAME = 1632
|
||||
PARTITION_NAME = 1633
|
||||
SUBPARTITION_NAME = 1634
|
||||
TEMPORARY_NAME = 1635
|
||||
RENAMED_NAME = 1636
|
||||
TOO_MANY_CONCURRENT_TRXS = 1637
|
||||
WARN_NON_ASCII_SEPARATOR_NOT_IMPLEMENTED = 1638
|
||||
DEBUG_SYNC_TIMEOUT = 1639
|
||||
DEBUG_SYNC_HIT_LIMIT = 1640
|
||||
DUP_SIGNAL_SET = 1641
|
||||
SIGNAL_WARN = 1642
|
||||
SIGNAL_NOT_FOUND = 1643
|
||||
SIGNAL_EXCEPTION = 1644
|
||||
RESIGNAL_WITHOUT_ACTIVE_HANDLER = 1645
|
||||
SIGNAL_BAD_CONDITION_TYPE = 1646
|
||||
WARN_COND_ITEM_TRUNCATED = 1647
|
||||
COND_ITEM_TOO_LONG = 1648
|
||||
UNKNOWN_LOCALE = 1649
|
||||
SLAVE_IGNORE_SERVER_IDS = 1650
|
||||
SAME_NAME_PARTITION_FIELD = 1652
|
||||
PARTITION_COLUMN_LIST_ERROR = 1653
|
||||
WRONG_TYPE_COLUMN_VALUE_ERROR = 1654
|
||||
TOO_MANY_PARTITION_FUNC_FIELDS_ERROR = 1655
|
||||
MAXVALUE_IN_VALUES_IN = 1656
|
||||
TOO_MANY_VALUES_ERROR = 1657
|
||||
ROW_SINGLE_PARTITION_FIELD_ERROR = 1658
|
||||
FIELD_TYPE_NOT_ALLOWED_AS_PARTITION_FIELD = 1659
|
||||
PARTITION_FIELDS_TOO_LONG = 1660
|
||||
BINLOG_ROW_ENGINE_AND_STMT_ENGINE = 1661
|
||||
BINLOG_ROW_MODE_AND_STMT_ENGINE = 1662
|
||||
BINLOG_UNSAFE_AND_STMT_ENGINE = 1663
|
||||
BINLOG_ROW_INJECTION_AND_STMT_ENGINE = 1664
|
||||
BINLOG_STMT_MODE_AND_ROW_ENGINE = 1665
|
||||
BINLOG_ROW_INJECTION_AND_STMT_MODE = 1666
|
||||
BINLOG_MULTIPLE_ENGINES_AND_SELF_LOGGING_ENGINE = 1667
|
||||
BINLOG_UNSAFE_LIMIT = 1668
|
||||
BINLOG_UNSAFE_SYSTEM_TABLE = 1670
|
||||
BINLOG_UNSAFE_AUTOINC_COLUMNS = 1671
|
||||
BINLOG_UNSAFE_UDF = 1672
|
||||
BINLOG_UNSAFE_SYSTEM_VARIABLE = 1673
|
||||
BINLOG_UNSAFE_SYSTEM_FUNCTION = 1674
|
||||
BINLOG_UNSAFE_NONTRANS_AFTER_TRANS = 1675
|
||||
MESSAGE_AND_STATEMENT = 1676
|
||||
SLAVE_CANT_CREATE_CONVERSION = 1678
|
||||
INSIDE_TRANSACTION_PREVENTS_SWITCH_BINLOG_FORMAT = 1679
|
||||
PATH_LENGTH = 1680
|
||||
WARN_DEPRECATED_SYNTAX_NO_REPLACEMENT = 1681
|
||||
WRONG_NATIVE_TABLE_STRUCTURE = 1682
|
||||
WRONG_PERFSCHEMA_USAGE = 1683
|
||||
WARN_I_S_SKIPPED_TABLE = 1684
|
||||
INSIDE_TRANSACTION_PREVENTS_SWITCH_BINLOG_DIRECT = 1685
|
||||
STORED_FUNCTION_PREVENTS_SWITCH_BINLOG_DIRECT = 1686
|
||||
SPATIAL_MUST_HAVE_GEOM_COL = 1687
|
||||
TOO_LONG_INDEX_COMMENT = 1688
|
||||
LOCK_ABORTED = 1689
|
||||
DATA_OUT_OF_RANGE = 1690
|
||||
WRONG_SPVAR_TYPE_IN_LIMIT = 1691
|
||||
BINLOG_UNSAFE_MULTIPLE_ENGINES_AND_SELF_LOGGING_ENGINE = 1692
|
||||
BINLOG_UNSAFE_MIXED_STATEMENT = 1693
|
||||
INSIDE_TRANSACTION_PREVENTS_SWITCH_SQL_LOG_BIN = 1694
|
||||
STORED_FUNCTION_PREVENTS_SWITCH_SQL_LOG_BIN = 1695
|
||||
FAILED_READ_FROM_PAR_FILE = 1696
|
||||
VALUES_IS_NOT_INT_TYPE_ERROR = 1697
|
||||
ACCESS_DENIED_NO_PASSWORD_ERROR = 1698
|
||||
SET_PASSWORD_AUTH_PLUGIN = 1699
|
||||
TRUNCATE_ILLEGAL_FK = 1701
|
||||
PLUGIN_IS_PERMANENT = 1702
|
||||
SLAVE_HEARTBEAT_VALUE_OUT_OF_RANGE_MIN = 1703
|
||||
SLAVE_HEARTBEAT_VALUE_OUT_OF_RANGE_MAX = 1704
|
||||
STMT_CACHE_FULL = 1705
|
||||
MULTI_UPDATE_KEY_CONFLICT = 1706
|
||||
TABLE_NEEDS_REBUILD = 1707
|
||||
WARN_OPTION_BELOW_LIMIT = 1708
|
||||
INDEX_COLUMN_TOO_LONG = 1709
|
||||
ERROR_IN_TRIGGER_BODY = 1710
|
||||
ERROR_IN_UNKNOWN_TRIGGER_BODY = 1711
|
||||
INDEX_CORRUPT = 1712
|
||||
UNDO_RECORD_TOO_BIG = 1713
|
||||
BINLOG_UNSAFE_INSERT_IGNORE_SELECT = 1714
|
||||
BINLOG_UNSAFE_INSERT_SELECT_UPDATE = 1715
|
||||
BINLOG_UNSAFE_REPLACE_SELECT = 1716
|
||||
BINLOG_UNSAFE_CREATE_IGNORE_SELECT = 1717
|
||||
BINLOG_UNSAFE_CREATE_REPLACE_SELECT = 1718
|
||||
BINLOG_UNSAFE_UPDATE_IGNORE = 1719
|
||||
PLUGIN_NO_UNINSTALL = 1720
|
||||
PLUGIN_NO_INSTALL = 1721
|
||||
BINLOG_UNSAFE_WRITE_AUTOINC_SELECT = 1722
|
||||
BINLOG_UNSAFE_CREATE_SELECT_AUTOINC = 1723
|
||||
BINLOG_UNSAFE_INSERT_TWO_KEYS = 1724
|
||||
TABLE_IN_FK_CHECK = 1725
|
||||
UNSUPPORTED_ENGINE = 1726
|
||||
BINLOG_UNSAFE_AUTOINC_NOT_FIRST = 1727
|
||||
CANNOT_LOAD_FROM_TABLE_V2 = 1728
|
||||
MASTER_DELAY_VALUE_OUT_OF_RANGE = 1729
|
||||
ONLY_FD_AND_RBR_EVENTS_ALLOWED_IN_BINLOG_STATEMENT = 1730
|
||||
PARTITION_EXCHANGE_DIFFERENT_OPTION = 1731
|
||||
PARTITION_EXCHANGE_PART_TABLE = 1732
|
||||
PARTITION_EXCHANGE_TEMP_TABLE = 1733
|
||||
PARTITION_INSTEAD_OF_SUBPARTITION = 1734
|
||||
UNKNOWN_PARTITION = 1735
|
||||
TABLES_DIFFERENT_METADATA = 1736
|
||||
ROW_DOES_NOT_MATCH_PARTITION = 1737
|
||||
BINLOG_CACHE_SIZE_GREATER_THAN_MAX = 1738
|
||||
WARN_INDEX_NOT_APPLICABLE = 1739
|
||||
PARTITION_EXCHANGE_FOREIGN_KEY = 1740
|
||||
RPL_INFO_DATA_TOO_LONG = 1742
|
||||
BINLOG_STMT_CACHE_SIZE_GREATER_THAN_MAX = 1745
|
||||
CANT_UPDATE_TABLE_IN_CREATE_TABLE_SELECT = 1746
|
||||
PARTITION_CLAUSE_ON_NONPARTITIONED = 1747
|
||||
ROW_DOES_NOT_MATCH_GIVEN_PARTITION_SET = 1748
|
||||
CHANGE_RPL_INFO_REPOSITORY_FAILURE = 1750
|
||||
WARNING_NOT_COMPLETE_ROLLBACK_WITH_CREATED_TEMP_TABLE = 1751
|
||||
WARNING_NOT_COMPLETE_ROLLBACK_WITH_DROPPED_TEMP_TABLE = 1752
|
||||
MTS_FEATURE_IS_NOT_SUPPORTED = 1753
|
||||
MTS_UPDATED_DBS_GREATER_MAX = 1754
|
||||
MTS_CANT_PARALLEL = 1755
|
||||
MTS_INCONSISTENT_DATA = 1756
|
||||
FULLTEXT_NOT_SUPPORTED_WITH_PARTITIONING = 1757
|
||||
DA_INVALID_CONDITION_NUMBER = 1758
|
||||
INSECURE_PLAIN_TEXT = 1759
|
||||
INSECURE_CHANGE_MASTER = 1760
|
||||
FOREIGN_DUPLICATE_KEY_WITH_CHILD_INFO = 1761
|
||||
FOREIGN_DUPLICATE_KEY_WITHOUT_CHILD_INFO = 1762
|
||||
SQLTHREAD_WITH_SECURE_SLAVE = 1763
|
||||
TABLE_HAS_NO_FT = 1764
|
||||
VARIABLE_NOT_SETTABLE_IN_SF_OR_TRIGGER = 1765
|
||||
VARIABLE_NOT_SETTABLE_IN_TRANSACTION = 1766
|
||||
SET_STATEMENT_CANNOT_INVOKE_FUNCTION = 1769
|
||||
GTID_NEXT_CANT_BE_AUTOMATIC_IF_GTID_NEXT_LIST_IS_NON_NULL = 1770
|
||||
MALFORMED_GTID_SET_SPECIFICATION = 1772
|
||||
MALFORMED_GTID_SET_ENCODING = 1773
|
||||
MALFORMED_GTID_SPECIFICATION = 1774
|
||||
GNO_EXHAUSTED = 1775
|
||||
BAD_SLAVE_AUTO_POSITION = 1776
|
||||
AUTO_POSITION_REQUIRES_GTID_MODE_NOT_OFF = 1777
|
||||
CANT_DO_IMPLICIT_COMMIT_IN_TRX_WHEN_GTID_NEXT_IS_SET = 1778
|
||||
GTID_MODE_ON_REQUIRES_ENFORCE_GTID_CONSISTENCY_ON = 1779
|
||||
CANT_SET_GTID_NEXT_TO_GTID_WHEN_GTID_MODE_IS_OFF = 1781
|
||||
CANT_SET_GTID_NEXT_TO_ANONYMOUS_WHEN_GTID_MODE_IS_ON = 1782
|
||||
CANT_SET_GTID_NEXT_LIST_TO_NON_NULL_WHEN_GTID_MODE_IS_OFF = 1783
|
||||
GTID_UNSAFE_NON_TRANSACTIONAL_TABLE = 1785
|
||||
GTID_UNSAFE_CREATE_SELECT = 1786
|
||||
GTID_UNSAFE_CREATE_DROP_TEMPORARY_TABLE_IN_TRANSACTION = 1787
|
||||
GTID_MODE_CAN_ONLY_CHANGE_ONE_STEP_AT_A_TIME = 1788
|
||||
MASTER_HAS_PURGED_REQUIRED_GTIDS = 1789
|
||||
CANT_SET_GTID_NEXT_WHEN_OWNING_GTID = 1790
|
||||
UNKNOWN_EXPLAIN_FORMAT = 1791
|
||||
CANT_EXECUTE_IN_READ_ONLY_TRANSACTION = 1792
|
||||
TOO_LONG_TABLE_PARTITION_COMMENT = 1793
|
||||
SLAVE_CONFIGURATION = 1794
|
||||
INNODB_FT_LIMIT = 1795
|
||||
INNODB_NO_FT_TEMP_TABLE = 1796
|
||||
INNODB_FT_WRONG_DOCID_COLUMN = 1797
|
||||
INNODB_FT_WRONG_DOCID_INDEX = 1798
|
||||
INNODB_ONLINE_LOG_TOO_BIG = 1799
|
||||
UNKNOWN_ALTER_ALGORITHM = 1800
|
||||
UNKNOWN_ALTER_LOCK = 1801
|
||||
MTS_CHANGE_MASTER_CANT_RUN_WITH_GAPS = 1802
|
||||
MTS_RECOVERY_FAILURE = 1803
|
||||
MTS_RESET_WORKERS = 1804
|
||||
COL_COUNT_DOESNT_MATCH_CORRUPTED_V2 = 1805
|
||||
SLAVE_SILENT_RETRY_TRANSACTION = 1806
|
||||
DISCARD_FK_CHECKS_RUNNING = 1807
|
||||
TABLE_SCHEMA_MISMATCH = 1808
|
||||
TABLE_IN_SYSTEM_TABLESPACE = 1809
|
||||
IO_READ_ERROR = 1810
|
||||
IO_WRITE_ERROR = 1811
|
||||
TABLESPACE_MISSING = 1812
|
||||
TABLESPACE_EXISTS = 1813
|
||||
TABLESPACE_DISCARDED = 1814
|
||||
INTERNAL_ERROR = 1815
|
||||
INNODB_IMPORT_ERROR = 1816
|
||||
INNODB_INDEX_CORRUPT = 1817
|
||||
INVALID_YEAR_COLUMN_LENGTH = 1818
|
||||
NOT_VALID_PASSWORD = 1819
|
||||
MUST_CHANGE_PASSWORD = 1820
|
||||
FK_NO_INDEX_CHILD = 1821
|
||||
FK_NO_INDEX_PARENT = 1822
|
||||
FK_FAIL_ADD_SYSTEM = 1823
|
||||
FK_CANNOT_OPEN_PARENT = 1824
|
||||
FK_INCORRECT_OPTION = 1825
|
||||
FK_DUP_NAME = 1826
|
||||
PASSWORD_FORMAT = 1827
|
||||
FK_COLUMN_CANNOT_DROP = 1828
|
||||
FK_COLUMN_CANNOT_DROP_CHILD = 1829
|
||||
FK_COLUMN_NOT_NULL = 1830
|
||||
DUP_INDEX = 1831
|
||||
FK_COLUMN_CANNOT_CHANGE = 1832
|
||||
FK_COLUMN_CANNOT_CHANGE_CHILD = 1833
|
||||
MALFORMED_PACKET = 1835
|
||||
READ_ONLY_MODE = 1836
|
||||
GTID_NEXT_TYPE_UNDEFINED_GTID = 1837
|
||||
VARIABLE_NOT_SETTABLE_IN_SP = 1838
|
||||
CANT_SET_GTID_PURGED_WHEN_GTID_EXECUTED_IS_NOT_EMPTY = 1840
|
||||
CANT_SET_GTID_PURGED_WHEN_OWNED_GTIDS_IS_NOT_EMPTY = 1841
|
||||
GTID_PURGED_WAS_CHANGED = 1842
|
||||
GTID_EXECUTED_WAS_CHANGED = 1843
|
||||
BINLOG_STMT_MODE_AND_NO_REPL_TABLES = 1844
|
||||
ALTER_OPERATION_NOT_SUPPORTED = 1845
|
||||
ALTER_OPERATION_NOT_SUPPORTED_REASON = 1846
|
||||
ALTER_OPERATION_NOT_SUPPORTED_REASON_COPY = 1847
|
||||
ALTER_OPERATION_NOT_SUPPORTED_REASON_PARTITION = 1848
|
||||
ALTER_OPERATION_NOT_SUPPORTED_REASON_FK_RENAME = 1849
|
||||
ALTER_OPERATION_NOT_SUPPORTED_REASON_COLUMN_TYPE = 1850
|
||||
ALTER_OPERATION_NOT_SUPPORTED_REASON_FK_CHECK = 1851
|
||||
ALTER_OPERATION_NOT_SUPPORTED_REASON_NOPK = 1853
|
||||
ALTER_OPERATION_NOT_SUPPORTED_REASON_AUTOINC = 1854
|
||||
ALTER_OPERATION_NOT_SUPPORTED_REASON_HIDDEN_FTS = 1855
|
||||
ALTER_OPERATION_NOT_SUPPORTED_REASON_CHANGE_FTS = 1856
|
||||
ALTER_OPERATION_NOT_SUPPORTED_REASON_FTS = 1857
|
||||
SQL_SLAVE_SKIP_COUNTER_NOT_SETTABLE_IN_GTID_MODE = 1858
|
||||
DUP_UNKNOWN_IN_INDEX = 1859
|
||||
IDENT_CAUSES_TOO_LONG_PATH = 1860
|
||||
ALTER_OPERATION_NOT_SUPPORTED_REASON_NOT_NULL = 1861
|
||||
MUST_CHANGE_PASSWORD_LOGIN = 1862
|
||||
ROW_IN_WRONG_PARTITION = 1863
|
||||
MTS_EVENT_BIGGER_PENDING_JOBS_SIZE_MAX = 1864
|
||||
BINLOG_LOGICAL_CORRUPTION = 1866
|
||||
WARN_PURGE_LOG_IN_USE = 1867
|
||||
WARN_PURGE_LOG_IS_ACTIVE = 1868
|
||||
AUTO_INCREMENT_CONFLICT = 1869
|
||||
WARN_ON_BLOCKHOLE_IN_RBR = 1870
|
||||
SLAVE_MI_INIT_REPOSITORY = 1871
|
||||
SLAVE_RLI_INIT_REPOSITORY = 1872
|
||||
ACCESS_DENIED_CHANGE_USER_ERROR = 1873
|
||||
INNODB_READ_ONLY = 1874
|
||||
STOP_SLAVE_SQL_THREAD_TIMEOUT = 1875
|
||||
STOP_SLAVE_IO_THREAD_TIMEOUT = 1876
|
||||
TABLE_CORRUPT = 1877
|
||||
TEMP_FILE_WRITE_FAILURE = 1878
|
||||
INNODB_FT_AUX_NOT_HEX_ID = 1879
|
||||
OLD_TEMPORALS_UPGRADED = 1880
|
||||
INNODB_FORCED_RECOVERY = 1881
|
||||
AES_INVALID_IV = 1882
|
||||
PLUGIN_CANNOT_BE_UNINSTALLED = 1883
|
||||
GTID_UNSAFE_BINLOG_SPLITTABLE_STATEMENT_AND_ASSIGNED_GTID = 1884
|
||||
SLAVE_HAS_MORE_GTIDS_THAN_MASTER = 1885
|
||||
MISSING_KEY = 1886
|
||||
ERROR_LAST = 1973
|
||||
@ -0,0 +1,40 @@
|
||||
"""MySQL FIELD_TYPE Constants
|
||||
|
||||
These constants represent the various column (field) types that are
|
||||
supported by MySQL.
|
||||
"""
|
||||
|
||||
DECIMAL = 0
|
||||
TINY = 1
|
||||
SHORT = 2
|
||||
LONG = 3
|
||||
FLOAT = 4
|
||||
DOUBLE = 5
|
||||
NULL = 6
|
||||
TIMESTAMP = 7
|
||||
LONGLONG = 8
|
||||
INT24 = 9
|
||||
DATE = 10
|
||||
TIME = 11
|
||||
DATETIME = 12
|
||||
YEAR = 13
|
||||
# NEWDATE = 14 # Internal to MySQL.
|
||||
VARCHAR = 15
|
||||
BIT = 16
|
||||
# TIMESTAMP2 = 17
|
||||
# DATETIME2 = 18
|
||||
# TIME2 = 19
|
||||
JSON = 245
|
||||
NEWDECIMAL = 246
|
||||
ENUM = 247
|
||||
SET = 248
|
||||
TINY_BLOB = 249
|
||||
MEDIUM_BLOB = 250
|
||||
LONG_BLOB = 251
|
||||
BLOB = 252
|
||||
VAR_STRING = 253
|
||||
STRING = 254
|
||||
GEOMETRY = 255
|
||||
|
||||
CHAR = TINY
|
||||
INTERVAL = ENUM
|
||||
23
venv/lib/python3.11/site-packages/MySQLdb/constants/FLAG.py
Normal file
23
venv/lib/python3.11/site-packages/MySQLdb/constants/FLAG.py
Normal file
@ -0,0 +1,23 @@
|
||||
"""MySQL FLAG Constants
|
||||
|
||||
These flags are used along with the FIELD_TYPE to indicate various
|
||||
properties of columns in a result set.
|
||||
|
||||
"""
|
||||
|
||||
NOT_NULL = 1
|
||||
PRI_KEY = 2
|
||||
UNIQUE_KEY = 4
|
||||
MULTIPLE_KEY = 8
|
||||
BLOB = 16
|
||||
UNSIGNED = 32
|
||||
ZEROFILL = 64
|
||||
BINARY = 128
|
||||
ENUM = 256
|
||||
AUTO_INCREMENT = 512
|
||||
TIMESTAMP = 1024
|
||||
SET = 2048
|
||||
NUM = 32768
|
||||
PART_KEY = 16384
|
||||
GROUP = 32768
|
||||
UNIQUE = 65536
|
||||
@ -0,0 +1 @@
|
||||
__all__ = ["CR", "FIELD_TYPE", "CLIENT", "ER", "FLAG"]
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
139
venv/lib/python3.11/site-packages/MySQLdb/converters.py
Normal file
139
venv/lib/python3.11/site-packages/MySQLdb/converters.py
Normal file
@ -0,0 +1,139 @@
|
||||
"""MySQLdb type conversion module
|
||||
|
||||
This module handles all the type conversions for MySQL. If the default
|
||||
type conversions aren't what you need, you can make your own. The
|
||||
dictionary conversions maps some kind of type to a conversion function
|
||||
which returns the corresponding value:
|
||||
|
||||
Key: FIELD_TYPE.* (from MySQLdb.constants)
|
||||
|
||||
Conversion function:
|
||||
|
||||
Arguments: string
|
||||
|
||||
Returns: Python object
|
||||
|
||||
Key: Python type object (from types) or class
|
||||
|
||||
Conversion function:
|
||||
|
||||
Arguments: Python object of indicated type or class AND
|
||||
conversion dictionary
|
||||
|
||||
Returns: SQL literal value
|
||||
|
||||
Notes: Most conversion functions can ignore the dictionary, but
|
||||
it is a required parameter. It is necessary for converting
|
||||
things like sequences and instances.
|
||||
|
||||
Don't modify conversions if you can avoid it. Instead, make copies
|
||||
(with the copy() method), modify the copies, and then pass them to
|
||||
MySQL.connect().
|
||||
"""
|
||||
from decimal import Decimal
|
||||
|
||||
from MySQLdb._mysql import string_literal
|
||||
from MySQLdb.constants import FIELD_TYPE, FLAG
|
||||
from MySQLdb.times import (
|
||||
Date,
|
||||
DateTimeType,
|
||||
DateTime2literal,
|
||||
DateTimeDeltaType,
|
||||
DateTimeDelta2literal,
|
||||
DateTime_or_None,
|
||||
TimeDelta_or_None,
|
||||
Date_or_None,
|
||||
)
|
||||
from MySQLdb._exceptions import ProgrammingError
|
||||
|
||||
import array
|
||||
|
||||
NoneType = type(None)
|
||||
|
||||
try:
|
||||
ArrayType = array.ArrayType
|
||||
except AttributeError:
|
||||
ArrayType = array.array
|
||||
|
||||
|
||||
def Bool2Str(s, d):
|
||||
return b"1" if s else b"0"
|
||||
|
||||
|
||||
def Set2Str(s, d):
|
||||
# Only support ascii string. Not tested.
|
||||
return string_literal(",".join(s))
|
||||
|
||||
|
||||
def Thing2Str(s, d):
|
||||
"""Convert something into a string via str()."""
|
||||
return str(s)
|
||||
|
||||
|
||||
def Float2Str(o, d):
|
||||
s = repr(o)
|
||||
if s in ("inf", "-inf", "nan"):
|
||||
raise ProgrammingError("%s can not be used with MySQL" % s)
|
||||
if "e" not in s:
|
||||
s += "e0"
|
||||
return s
|
||||
|
||||
|
||||
def None2NULL(o, d):
|
||||
"""Convert None to NULL."""
|
||||
return b"NULL"
|
||||
|
||||
|
||||
def Thing2Literal(o, d):
|
||||
"""Convert something into a SQL string literal. If using
|
||||
MySQL-3.23 or newer, string_literal() is a method of the
|
||||
_mysql.MYSQL object, and this function will be overridden with
|
||||
that method when the connection is created."""
|
||||
return string_literal(o)
|
||||
|
||||
|
||||
def Decimal2Literal(o, d):
|
||||
return format(o, "f")
|
||||
|
||||
|
||||
def array2Str(o, d):
|
||||
return Thing2Literal(o.tostring(), d)
|
||||
|
||||
|
||||
# bytes or str regarding to BINARY_FLAG.
|
||||
_bytes_or_str = ((FLAG.BINARY, bytes), (None, str))
|
||||
|
||||
conversions = {
|
||||
int: Thing2Str,
|
||||
float: Float2Str,
|
||||
NoneType: None2NULL,
|
||||
ArrayType: array2Str,
|
||||
bool: Bool2Str,
|
||||
Date: Thing2Literal,
|
||||
DateTimeType: DateTime2literal,
|
||||
DateTimeDeltaType: DateTimeDelta2literal,
|
||||
set: Set2Str,
|
||||
Decimal: Decimal2Literal,
|
||||
FIELD_TYPE.TINY: int,
|
||||
FIELD_TYPE.SHORT: int,
|
||||
FIELD_TYPE.LONG: int,
|
||||
FIELD_TYPE.FLOAT: float,
|
||||
FIELD_TYPE.DOUBLE: float,
|
||||
FIELD_TYPE.DECIMAL: Decimal,
|
||||
FIELD_TYPE.NEWDECIMAL: Decimal,
|
||||
FIELD_TYPE.LONGLONG: int,
|
||||
FIELD_TYPE.INT24: int,
|
||||
FIELD_TYPE.YEAR: int,
|
||||
FIELD_TYPE.TIMESTAMP: DateTime_or_None,
|
||||
FIELD_TYPE.DATETIME: DateTime_or_None,
|
||||
FIELD_TYPE.TIME: TimeDelta_or_None,
|
||||
FIELD_TYPE.DATE: Date_or_None,
|
||||
FIELD_TYPE.TINY_BLOB: bytes,
|
||||
FIELD_TYPE.MEDIUM_BLOB: bytes,
|
||||
FIELD_TYPE.LONG_BLOB: bytes,
|
||||
FIELD_TYPE.BLOB: bytes,
|
||||
FIELD_TYPE.STRING: bytes,
|
||||
FIELD_TYPE.VAR_STRING: bytes,
|
||||
FIELD_TYPE.VARCHAR: bytes,
|
||||
FIELD_TYPE.JSON: bytes,
|
||||
}
|
||||
500
venv/lib/python3.11/site-packages/MySQLdb/cursors.py
Normal file
500
venv/lib/python3.11/site-packages/MySQLdb/cursors.py
Normal file
@ -0,0 +1,500 @@
|
||||
"""MySQLdb Cursors
|
||||
|
||||
This module implements Cursors of various types for MySQLdb. By
|
||||
default, MySQLdb uses the Cursor class.
|
||||
"""
|
||||
import re
|
||||
|
||||
from ._exceptions import ProgrammingError
|
||||
|
||||
|
||||
#: Regular expression for ``Cursor.executemany```.
|
||||
#: executemany only supports simple bulk insert.
|
||||
#: You can use it to load large dataset.
|
||||
RE_INSERT_VALUES = re.compile(
|
||||
"".join(
|
||||
[
|
||||
r"\s*((?:INSERT|REPLACE)\b.+\bVALUES?\s*)",
|
||||
r"(\(\s*(?:%s|%\(.+\)s)\s*(?:,\s*(?:%s|%\(.+\)s)\s*)*\))",
|
||||
r"(\s*(?:ON DUPLICATE.*)?);?\s*\Z",
|
||||
]
|
||||
),
|
||||
re.IGNORECASE | re.DOTALL,
|
||||
)
|
||||
|
||||
|
||||
class BaseCursor:
|
||||
"""A base for Cursor classes. Useful attributes:
|
||||
|
||||
description
|
||||
A tuple of DB API 7-tuples describing the columns in
|
||||
the last executed query; see PEP-249 for details.
|
||||
|
||||
description_flags
|
||||
Tuple of column flags for last query, one entry per column
|
||||
in the result set. Values correspond to those in
|
||||
MySQLdb.constants.FLAG. See MySQL documentation (C API)
|
||||
for more information. Non-standard extension.
|
||||
|
||||
arraysize
|
||||
default number of rows fetchmany() will fetch
|
||||
"""
|
||||
|
||||
#: Max statement size which :meth:`executemany` generates.
|
||||
#:
|
||||
#: Max size of allowed statement is max_allowed_packet - packet_header_size.
|
||||
#: Default value of max_allowed_packet is 1048576.
|
||||
max_stmt_length = 64 * 1024
|
||||
|
||||
from ._exceptions import (
|
||||
MySQLError,
|
||||
Warning,
|
||||
Error,
|
||||
InterfaceError,
|
||||
DatabaseError,
|
||||
DataError,
|
||||
OperationalError,
|
||||
IntegrityError,
|
||||
InternalError,
|
||||
ProgrammingError,
|
||||
NotSupportedError,
|
||||
)
|
||||
|
||||
connection = None
|
||||
|
||||
def __init__(self, connection):
|
||||
self.connection = connection
|
||||
self.description = None
|
||||
self.description_flags = None
|
||||
self.rowcount = 0
|
||||
self.arraysize = 1
|
||||
self._executed = None
|
||||
|
||||
self.lastrowid = None
|
||||
self._result = None
|
||||
self.rownumber = None
|
||||
self._rows = None
|
||||
|
||||
def _discard(self):
|
||||
self.description = None
|
||||
self.description_flags = None
|
||||
# Django uses some member after __exit__.
|
||||
# So we keep rowcount and lastrowid here. They are cleared in Cursor._query().
|
||||
# self.rowcount = 0
|
||||
# self.lastrowid = None
|
||||
self._rows = None
|
||||
self.rownumber = None
|
||||
|
||||
if self._result:
|
||||
self._result.discard()
|
||||
self._result = None
|
||||
|
||||
con = self.connection
|
||||
if con is None:
|
||||
return
|
||||
while con.next_result() == 0: # -1 means no more data.
|
||||
con.discard_result()
|
||||
|
||||
def close(self):
|
||||
"""Close the cursor. No further queries will be possible."""
|
||||
try:
|
||||
if self.connection is None:
|
||||
return
|
||||
self._discard()
|
||||
finally:
|
||||
self.connection = None
|
||||
self._result = None
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, *exc_info):
|
||||
del exc_info
|
||||
self.close()
|
||||
|
||||
def _check_executed(self):
|
||||
if not self._executed:
|
||||
raise ProgrammingError("execute() first")
|
||||
|
||||
def nextset(self):
|
||||
"""Advance to the next result set.
|
||||
|
||||
Returns None if there are no more result sets.
|
||||
"""
|
||||
if self._executed:
|
||||
self.fetchall()
|
||||
|
||||
db = self._get_db()
|
||||
nr = db.next_result()
|
||||
if nr == -1:
|
||||
return None
|
||||
self._do_get_result(db)
|
||||
self._post_get_result()
|
||||
return 1
|
||||
|
||||
def _do_get_result(self, db):
|
||||
self._result = result = self._get_result()
|
||||
if result is None:
|
||||
self.description = self.description_flags = None
|
||||
else:
|
||||
self.description = result.describe()
|
||||
self.description_flags = result.field_flags()
|
||||
|
||||
self.rowcount = db.affected_rows()
|
||||
self.rownumber = 0
|
||||
self.lastrowid = db.insert_id()
|
||||
|
||||
def _post_get_result(self):
|
||||
pass
|
||||
|
||||
def setinputsizes(self, *args):
|
||||
"""Does nothing, required by DB API."""
|
||||
|
||||
def setoutputsizes(self, *args):
|
||||
"""Does nothing, required by DB API."""
|
||||
|
||||
def _get_db(self):
|
||||
con = self.connection
|
||||
if con is None:
|
||||
raise ProgrammingError("cursor closed")
|
||||
return con
|
||||
|
||||
def execute(self, query, args=None):
|
||||
"""Execute a query.
|
||||
|
||||
query -- string, query to execute on server
|
||||
args -- optional sequence or mapping, parameters to use with query.
|
||||
|
||||
Note: If args is a sequence, then %s must be used as the
|
||||
parameter placeholder in the query. If a mapping is used,
|
||||
%(key)s must be used as the placeholder.
|
||||
|
||||
Returns integer represents rows affected, if any
|
||||
"""
|
||||
self._discard()
|
||||
|
||||
mogrified_query = self._mogrify(query, args)
|
||||
|
||||
assert isinstance(mogrified_query, (bytes, bytearray))
|
||||
res = self._query(mogrified_query)
|
||||
return res
|
||||
|
||||
def _mogrify(self, query, args=None):
|
||||
"""Return query after binding args."""
|
||||
db = self._get_db()
|
||||
|
||||
if isinstance(query, str):
|
||||
query = query.encode(db.encoding)
|
||||
|
||||
if args is not None:
|
||||
if isinstance(args, dict):
|
||||
nargs = {}
|
||||
for key, item in args.items():
|
||||
if isinstance(key, str):
|
||||
key = key.encode(db.encoding)
|
||||
nargs[key] = db.literal(item)
|
||||
args = nargs
|
||||
else:
|
||||
args = tuple(map(db.literal, args))
|
||||
try:
|
||||
query = query % args
|
||||
except TypeError as m:
|
||||
raise ProgrammingError(str(m))
|
||||
|
||||
return query
|
||||
|
||||
def mogrify(self, query, args=None):
|
||||
"""Return query after binding args.
|
||||
|
||||
query -- string, query to mogrify
|
||||
args -- optional sequence or mapping, parameters to use with query.
|
||||
|
||||
Note: If args is a sequence, then %s must be used as the
|
||||
parameter placeholder in the query. If a mapping is used,
|
||||
%(key)s must be used as the placeholder.
|
||||
|
||||
Returns string representing query that would be executed by the server
|
||||
"""
|
||||
return self._mogrify(query, args).decode(self._get_db().encoding)
|
||||
|
||||
def executemany(self, query, args):
|
||||
# type: (str, list) -> int
|
||||
"""Execute a multi-row query.
|
||||
|
||||
:param query: query to execute on server
|
||||
:param args: Sequence of sequences or mappings. It is used as parameter.
|
||||
:return: Number of rows affected, if any.
|
||||
|
||||
This method improves performance on multiple-row INSERT and
|
||||
REPLACE. Otherwise it is equivalent to looping over args with
|
||||
execute().
|
||||
"""
|
||||
if not args:
|
||||
return
|
||||
|
||||
m = RE_INSERT_VALUES.match(query)
|
||||
if m:
|
||||
q_prefix = m.group(1) % ()
|
||||
q_values = m.group(2).rstrip()
|
||||
q_postfix = m.group(3) or ""
|
||||
assert q_values[0] == "(" and q_values[-1] == ")"
|
||||
return self._do_execute_many(
|
||||
q_prefix,
|
||||
q_values,
|
||||
q_postfix,
|
||||
args,
|
||||
self.max_stmt_length,
|
||||
self._get_db().encoding,
|
||||
)
|
||||
|
||||
self.rowcount = sum(self.execute(query, arg) for arg in args)
|
||||
return self.rowcount
|
||||
|
||||
def _do_execute_many(
    self, prefix, values, postfix, args, max_stmt_length, encoding
):
    """Run a multi-row INSERT/REPLACE by concatenating mogrified value
    tuples onto *prefix*, executing whenever adding another tuple would
    push the statement past *max_stmt_length*.

    prefix  -- the "INSERT ... VALUES " head of the statement
    values  -- the "(%s, %s, ...)" template for one row
    postfix -- trailing SQL (e.g. ON DUPLICATE KEY ...), may be empty
    args    -- iterable of parameter rows
    Returns the total number of affected rows.
    """
    # Normalize all three fragments to bytes so they can be joined
    # into the bytearray statement buffer.
    if isinstance(prefix, str):
        prefix = prefix.encode(encoding)
    if isinstance(values, str):
        values = values.encode(encoding)
    if isinstance(postfix, str):
        postfix = postfix.encode(encoding)
    sql = bytearray(prefix)
    args = iter(args)
    # First row goes in unconditionally (caller guarantees args is non-empty).
    v = self._mogrify(values, next(args))
    sql += v
    rows = 0
    for arg in args:
        v = self._mogrify(values, arg)
        # +1 accounts for the joining comma.
        if len(sql) + len(v) + len(postfix) + 1 > max_stmt_length:
            # Flush the current batch and start a new statement.
            rows += self.execute(sql + postfix)
            sql = bytearray(prefix)
        else:
            sql += b","
        sql += v
    # Flush the final (possibly only) batch.
    rows += self.execute(sql + postfix)
    self.rowcount = rows
    return rows
|
||||
|
||||
def callproc(self, procname, args=()):
    """Execute stored procedure procname with args

    procname -- string, name of procedure to execute on server

    args -- Sequence of parameters to use with procedure

    Returns the original args.

    Compatibility warning: PEP-249 specifies that any modified
    parameters must be returned. This is currently impossible
    as they are only available by storing them in a server
    variable and then retrieved by a query. Since stored
    procedures return zero or more result sets, there is no
    reliable way to get at OUT or INOUT parameters via callproc.
    The server variables are named @_procname_n, where procname
    is the parameter above and n is the position of the parameter
    (from zero). Once all result sets generated by the procedure
    have been fetched, you can issue a SELECT @_procname_0, ...
    query using .execute() to get any OUT or INOUT values.

    Compatibility warning: The act of calling a stored procedure
    itself creates an empty result set. This appears after any
    result sets generated by the procedure. This is non-standard
    behavior with respect to the DB-API. Be sure to use nextset()
    to advance through all result sets; otherwise you may get
    disconnected.
    """
    db = self._get_db()
    if isinstance(procname, str):
        procname = procname.encode(db.encoding)
    if args:
        # Stage each argument in a server-side variable @_procname_N,
        # escaping values via the connection's literal().
        fmt = b"@_" + procname + b"_%d=%s"
        q = b"SET %s" % b",".join(
            fmt % (index, db.literal(arg)) for index, arg in enumerate(args)
        )
        self._query(q)
        self.nextset()

    # Invoke the procedure with the staged variables as arguments.
    q = b"CALL %s(%s)" % (
        procname,
        b",".join([b"@_%s_%d" % (procname, i) for i in range(len(args))]),
    )
    self._query(q)
    return args
|
||||
|
||||
def _query(self, q):
    """Send raw query bytes *q* to the server and refresh this
    cursor's result state. Returns the new rowcount."""
    conn = self._get_db()
    # Clear state left over from any previous statement before running.
    self._result = None
    self.rowcount = None
    self.lastrowid = None
    conn.query(q)
    self._do_get_result(conn)
    self._post_get_result()
    self._executed = q
    return self.rowcount
|
||||
|
||||
def _fetch_row(self, size=1):
    """Fetch up to *size* rows from the pending result set (size == 0
    means all remaining rows); returns () when no result is pending."""
    result = self._result
    if not result:
        return ()
    return result.fetch_row(size, self._fetch_type)
|
||||
|
||||
def __iter__(self):
    """Iterate over the result rows, one fetchone() at a time."""
    # Repeatedly call fetchone() until it signals exhaustion with None.
    while True:
        row = self.fetchone()
        if row is None:
            return
        yield row
|
||||
|
||||
# Re-export the module-level DB-API 2.0 exception classes as attributes,
# so code holding only a cursor can reference them (PEP 249 optional
# Connection/Cursor error attributes).
Warning = Warning
Error = Error
InterfaceError = InterfaceError
DatabaseError = DatabaseError
DataError = DataError
OperationalError = OperationalError
IntegrityError = IntegrityError
InternalError = InternalError
ProgrammingError = ProgrammingError
NotSupportedError = NotSupportedError
|
||||
|
||||
|
||||
class CursorStoreResultMixIn:
    """This is a MixIn class which causes the entire result set to be
    stored on the client side, i.e. it uses mysql_store_result(). If the
    result set can be very large, consider adding a LIMIT clause to your
    query, or using CursorUseResultMixIn instead."""

    def _get_result(self):
        # Copy the whole result set into client memory.
        return self._get_db().store_result()

    def _post_get_result(self):
        # Materialize every row now; the underlying result can then be freed.
        self._rows = self._fetch_row(0)
        self._result = None

    def fetchone(self):
        """Fetches a single row from the cursor. None indicates that
        no more rows are available."""
        self._check_executed()
        if self.rownumber >= len(self._rows):
            return None
        result = self._rows[self.rownumber]
        self.rownumber = self.rownumber + 1
        return result

    def fetchmany(self, size=None):
        """Fetch up to size rows from the cursor. Result set may be smaller
        than size. If size is not defined, cursor.arraysize is used."""
        self._check_executed()
        end = self.rownumber + (size or self.arraysize)
        result = self._rows[self.rownumber : end]
        self.rownumber = min(end, len(self._rows))
        return result

    def fetchall(self):
        """Fetches all available rows from the cursor."""
        self._check_executed()
        if self.rownumber:
            result = self._rows[self.rownumber :]
        else:
            result = self._rows
        self.rownumber = len(self._rows)
        return result

    def scroll(self, value, mode="relative"):
        """Scroll the cursor in the result set to a new position according
        to mode.

        If mode is 'relative' (default), value is taken as offset to
        the current position in the result set, if set to 'absolute',
        value states an absolute target position."""
        self._check_executed()
        if mode == "relative":
            r = self.rownumber + value
        elif mode == "absolute":
            r = value
        else:
            raise ProgrammingError("unknown scroll mode %s" % repr(mode))
        if r < 0 or r >= len(self._rows):
            raise IndexError("out of range")
        self.rownumber = r

    def __iter__(self):
        """Iterate over the remaining (unfetched) rows."""
        self._check_executed()
        # BUGFIX: the previous `rownumber and rows[rownumber:] or rows`
        # idiom fell through to the full row list whenever the remaining
        # slice was empty (empty list is falsy), so iterating an exhausted
        # cursor restarted from the first row. A real conditional keeps
        # the empty remainder empty.
        result = self._rows[self.rownumber :] if self.rownumber else self._rows
        return iter(result)
|
||||
|
||||
|
||||
class CursorUseResultMixIn:

    """This is a MixIn class which causes the result set to be stored
    in the server and sent row-by-row to client side, i.e. it uses
    mysql_use_result(). You MUST retrieve the entire result set and
    close() the cursor before additional queries can be performed on
    the connection."""

    def _get_result(self):
        # Stream rows from the server instead of copying them locally.
        return self._get_db().use_result()

    def fetchone(self):
        """Fetches a single row from the cursor."""
        self._check_executed()
        rows = self._fetch_row(1)
        if not rows:
            return None
        self.rownumber += 1
        return rows[0]

    def fetchmany(self, size=None):
        """Fetch up to size rows from the cursor. Result set may be smaller
        than size. If size is not defined, cursor.arraysize is used."""
        self._check_executed()
        rows = self._fetch_row(size or self.arraysize)
        self.rownumber += len(rows)
        return rows

    def fetchall(self):
        """Fetches all available rows from the cursor."""
        self._check_executed()
        rows = self._fetch_row(0)
        self.rownumber += len(rows)
        return rows

    def __iter__(self):
        return self

    def next(self):
        row = self.fetchone()
        if row is None:
            raise StopIteration
        return row

    __next__ = next
|
||||
|
||||
|
||||
class CursorTupleRowsMixIn:
    """This is a MixIn class that causes all rows to be returned as tuples,
    which is the standard form required by DB API."""

    # Row-format selector passed to the result's fetch_row(); 0 = tuples.
    _fetch_type = 0
|
||||
|
||||
|
||||
class CursorDictRowsMixIn:
    """This is a MixIn class that causes all rows to be returned as
    dictionaries. This is a non-standard feature."""

    # Row-format selector passed to the result's fetch_row(); 1 = dicts.
    _fetch_type = 1
|
||||
|
||||
|
||||
# Default cursor: tuple rows + client-side (stored) result set.
class Cursor(CursorStoreResultMixIn, CursorTupleRowsMixIn, BaseCursor):
    """This is the standard Cursor class that returns rows as tuples
    and stores the result set in the client."""
|
||||
|
||||
|
||||
# Dict rows + client-side (stored) result set.
class DictCursor(CursorStoreResultMixIn, CursorDictRowsMixIn, BaseCursor):
    """This is a Cursor class that returns rows as dictionaries and
    stores the result set in the client."""
|
||||
|
||||
|
||||
# Tuple rows + server-side (streamed) result set.
class SSCursor(CursorUseResultMixIn, CursorTupleRowsMixIn, BaseCursor):
    """This is a Cursor class that returns rows as tuples and stores
    the result set in the server."""
|
||||
|
||||
|
||||
# Dict rows + server-side (streamed) result set.
class SSDictCursor(CursorUseResultMixIn, CursorDictRowsMixIn, BaseCursor):
    """This is a Cursor class that returns rows as dictionaries and
    stores the result set in the server."""
|
||||
3
venv/lib/python3.11/site-packages/MySQLdb/release.py
Normal file
3
venv/lib/python3.11/site-packages/MySQLdb/release.py
Normal file
@ -0,0 +1,3 @@
|
||||
# Release metadata for this mysqlclient distribution.
__author__ = "Inada Naoki <songofacandy@gmail.com>"
__version__ = "2.2.7"
# (major, minor, micro, release level, serial)
version_info = (2, 2, 7, "final", 0)
|
||||
150
venv/lib/python3.11/site-packages/MySQLdb/times.py
Normal file
150
venv/lib/python3.11/site-packages/MySQLdb/times.py
Normal file
@ -0,0 +1,150 @@
|
||||
"""times module
|
||||
|
||||
This module provides some Date and Time classes for dealing with MySQL data.
|
||||
|
||||
Use Python datetime module to handle date and time columns.
|
||||
"""
|
||||
from time import localtime
|
||||
from datetime import date, datetime, time, timedelta
|
||||
from MySQLdb._mysql import string_literal
|
||||
|
||||
# DB-API 2.0 (PEP 249) type constructors: expose the stdlib datetime
# classes under the names the DB-API specifies.
Date = date
Time = time
TimeDelta = timedelta
Timestamp = datetime

# Additional aliases for the same stdlib types.
DateTimeDeltaType = timedelta
DateTimeType = datetime
|
||||
|
||||
|
||||
def DateFromTicks(ticks):
    """Convert UNIX ticks into a date instance."""
    year, month, day = localtime(ticks)[:3]
    return date(year, month, day)
|
||||
|
||||
|
||||
def TimeFromTicks(ticks):
    """Convert UNIX ticks into a time instance."""
    hour, minute, second = localtime(ticks)[3:6]
    return time(hour, minute, second)
|
||||
|
||||
|
||||
def TimestampFromTicks(ticks):
    """Convert UNIX ticks into a datetime instance."""
    fields = localtime(ticks)[:6]
    return datetime(*fields)
|
||||
|
||||
|
||||
# TIME and DATE values are formatted with plain str(); the default string
# form of datetime.time/date objects is used as-is.
format_TIME = format_DATE = str
|
||||
|
||||
|
||||
def format_TIMEDELTA(v):
    """Render a timedelta as MySQL TIME syntax: 'DAYS H:M:S'."""
    # timedelta normalizes .seconds into [0, 86400); split it out.
    hh, remainder = divmod(int(v.seconds), 3600)
    mm, ss = divmod(remainder, 60)
    return "%d %d:%d:%d" % (v.days, hh % 24, mm, ss)
|
||||
|
||||
|
||||
def format_TIMESTAMP(d):
    """
    :type d: datetime.datetime

    Render *d* as 'YYYY-MM-DD HH:MM:SS', appending '.ffffff' only when
    the value carries a non-zero microsecond component.
    """
    template = (
        "{0.year:04}-{0.month:02}-{0.day:02}"
        " "
        "{0.hour:02}:{0.minute:02}:{0.second:02}"
    )
    if d.microsecond:
        template += ".{0.microsecond:06}"
    return template.format(d)
|
||||
|
||||
|
||||
def DateTime_or_None(s):
    """Parse a MySQL DATETIME string into a datetime, or None if malformed."""
    try:
        if len(s) < 11:
            # Too short to carry a time part; treat it as a bare DATE.
            return Date_or_None(s)

        frac = s[20:]
        if not frac:
            # e.g. '... 12:00:00' - no fractional seconds
            micros = 0
        elif len(frac) < 7:
            # e.g. '... 12:00:00.123456' - scale up to microseconds
            micros = int(frac) * 10 ** (6 - len(frac))
        else:
            return None

        return datetime(
            int(s[:4]),  # year
            int(s[5:7]),  # month
            int(s[8:10]),  # day
            int(s[11:13] or 0),  # hour
            int(s[14:16] or 0),  # minute
            int(s[17:19] or 0),  # second
            micros,  # microsecond
        )
    except ValueError:
        return None
|
||||
|
||||
|
||||
def TimeDelta_or_None(s):
    """Parse a MySQL TIME string ('[-]H:MM:SS[.frac]') into a timedelta,
    or None when it cannot be parsed."""
    try:
        hours, minutes, seconds = s.split(":")
        if "." in seconds:
            seconds, frac = seconds.split(".")
            # Right-pad the fraction so it reads as microseconds.
            frac = frac.ljust(6, "0")
        else:
            frac = "0"
        negative = hours[0] == "-"
        delta = timedelta(
            hours=abs(int(hours)),
            minutes=int(minutes),
            seconds=int(seconds),
            microseconds=int(frac),
        )
        return -delta if negative else delta
    except ValueError:
        # unpacking or int conversion failed
        return None
|
||||
|
||||
|
||||
def Time_or_None(s):
    """Parse a MySQL TIME-of-day string ('HH:MM:SS[.frac]') into a
    datetime.time, or None when it cannot be parsed."""
    try:
        hours, minutes, seconds = s.split(":")
        if "." in seconds:
            seconds, frac = seconds.split(".")
            # Right-pad the fraction so it reads as microseconds.
            frac = frac.ljust(6, "0")
        else:
            frac = "0"
        return time(
            hour=int(hours),
            minute=int(minutes),
            second=int(seconds),
            microsecond=int(frac),
        )
    except ValueError:
        return None
|
||||
|
||||
|
||||
def Date_or_None(s):
    """Parse a MySQL DATE string ('YYYY-MM-DD') into a date, or None
    when it cannot be parsed (e.g. the zero date '0000-00-00')."""
    try:
        year, month, day = int(s[:4]), int(s[5:7]), int(s[8:10])
        return date(year, month, day)
    except ValueError:
        return None
|
||||
|
||||
|
||||
def DateTime2literal(d, c):
    """Format a DateTime object as an ISO timestamp."""
    formatted = format_TIMESTAMP(d)
    return string_literal(formatted)
|
||||
|
||||
|
||||
def DateTimeDelta2literal(d, c):
    """Format a DateTimeDelta object as a time."""
    formatted = format_TIMEDELTA(d)
    return string_literal(formatted)
|
||||
293
venv/lib/python3.11/site-packages/PIL/AvifImagePlugin.py
Normal file
293
venv/lib/python3.11/site-packages/PIL/AvifImagePlugin.py
Normal file
@ -0,0 +1,293 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from io import BytesIO
|
||||
from typing import IO
|
||||
|
||||
from . import ExifTags, Image, ImageFile
|
||||
|
||||
try:
|
||||
from . import _avif
|
||||
|
||||
SUPPORTED = True
|
||||
except ImportError:
|
||||
SUPPORTED = False
|
||||
|
||||
# Decoder options as module globals, until there is a way to pass parameters
|
||||
# to Image.open (see https://github.com/python-pillow/Pillow/issues/569)
|
||||
DECODE_CODEC_CHOICE = "auto"
|
||||
DEFAULT_MAX_THREADS = 0
|
||||
|
||||
|
||||
def get_codec_version(codec_name: str) -> str | None:
    """Return the version string _avif reports for *codec_name*, or None
    when that codec is not listed."""
    for entry in _avif.codec_versions().split(", "):
        # Each entry looks like "<name> [...]: <version> ..." - match on name.
        if entry.split(" [")[0] == codec_name:
            return entry.split(":")[-1].split(" ")[0]
    return None
|
||||
|
||||
|
||||
def _accept(prefix: bytes) -> bool | str:
|
||||
if prefix[4:8] != b"ftyp":
|
||||
return False
|
||||
major_brand = prefix[8:12]
|
||||
if major_brand in (
|
||||
# coding brands
|
||||
b"avif",
|
||||
b"avis",
|
||||
# We accept files with AVIF container brands; we can't yet know if
|
||||
# the ftyp box has the correct compatible brands, but if it doesn't
|
||||
# then the plugin will raise a SyntaxError which Pillow will catch
|
||||
# before moving on to the next plugin that accepts the file.
|
||||
#
|
||||
# Also, because this file might not actually be an AVIF file, we
|
||||
# don't raise an error if AVIF support isn't properly compiled.
|
||||
b"mif1",
|
||||
b"msf1",
|
||||
):
|
||||
if not SUPPORTED:
|
||||
return (
|
||||
"image file could not be identified because AVIF support not installed"
|
||||
)
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _get_default_max_threads() -> int:
    """Thread count used when the caller didn't override max_threads."""
    if DEFAULT_MAX_THREADS:
        return DEFAULT_MAX_THREADS
    # Prefer the set of CPUs this process may actually run on, when the
    # platform exposes it; otherwise fall back to the machine CPU count.
    if hasattr(os, "sched_getaffinity"):
        return len(os.sched_getaffinity(0))
    return os.cpu_count() or 1
|
||||
|
||||
|
||||
class AvifImageFile(ImageFile.ImageFile):
    """Pillow ImageFile subclass that decodes AVIF images via the native
    _avif extension module."""

    format = "AVIF"
    format_description = "AVIF image"
    # Index of the currently selected frame; -1 until seek(0) runs.
    __frame = -1

    def _open(self) -> None:
        if not SUPPORTED:
            msg = "image file could not be opened because AVIF support not installed"
            raise SyntaxError(msg)

        if DECODE_CODEC_CHOICE != "auto" and not _avif.decoder_codec_available(
            DECODE_CODEC_CHOICE
        ):
            msg = "Invalid opening codec"
            raise ValueError(msg)

        assert self.fp is not None
        # The whole file is handed to the native decoder up front.
        self._decoder = _avif.AvifDecoder(
            self.fp.read(),
            DECODE_CODEC_CHOICE,
            _get_default_max_threads(),
        )

        # Get info from decoder
        self._size, self.n_frames, self._mode, icc, exif, exif_orientation, xmp = (
            self._decoder.get_info()
        )
        self.is_animated = self.n_frames > 1

        if icc:
            self.info["icc_profile"] = icc
        if xmp:
            self.info["xmp"] = xmp

        # Merge the container-level orientation into the EXIF payload when
        # it disagrees with (or is missing from) the embedded EXIF data.
        if exif_orientation != 1 or exif:
            exif_data = Image.Exif()
            if exif:
                exif_data.load(exif)
                original_orientation = exif_data.get(ExifTags.Base.Orientation, 1)
            else:
                original_orientation = 1
            if exif_orientation != original_orientation:
                exif_data[ExifTags.Base.Orientation] = exif_orientation
                exif = exif_data.tobytes()
        if exif:
            self.info["exif"] = exif
        self.seek(0)

    def seek(self, frame: int) -> None:
        if not self._seek_check(frame):
            return

        # Set tile
        self.__frame = frame
        self.tile = [ImageFile._Tile("raw", (0, 0) + self.size, 0, self.mode)]

    def load(self) -> Image.core.PixelAccess | None:
        if self.tile:
            # We need to load the image data for this frame
            data, timescale, pts_in_timescales, duration_in_timescales = (
                self._decoder.get_frame(self.__frame)
            )
            # Express timestamp/duration in milliseconds.
            self.info["timestamp"] = round(1000 * (pts_in_timescales / timescale))
            self.info["duration"] = round(1000 * (duration_in_timescales / timescale))

            if self.fp and self._exclusive_fp:
                self.fp.close()
            self.fp = BytesIO(data)

        return super().load()

    def load_seek(self, pos: int) -> None:
        # Random access within the frame data is not needed; frames are
        # handed over whole by the decoder.
        pass

    def tell(self) -> int:
        return self.__frame
|
||||
|
||||
|
||||
def _save_all(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
    """save_all entry point: delegate to _save with save_all enabled."""
    _save(im, fp, filename, save_all=True)
|
||||
|
||||
|
||||
def _save(
    im: Image.Image, fp: IO[bytes], filename: str | bytes, save_all: bool = False
) -> None:
    """Encode *im* (plus appended frames when save_all) to *fp* as AVIF.

    Raises ValueError for invalid quality/codec/advanced options and
    OSError when the native encoder produces no output.
    """
    info = im.encoderinfo.copy()
    if save_all:
        append_images = list(info.get("append_images", []))
    else:
        append_images = []

    # Total frame count across the primary image and appended images.
    total = 0
    for ims in [im] + append_images:
        total += getattr(ims, "n_frames", 1)

    quality = info.get("quality", 75)
    if not isinstance(quality, int) or quality < 0 or quality > 100:
        msg = "Invalid quality setting"
        raise ValueError(msg)

    duration = info.get("duration", 0)
    subsampling = info.get("subsampling", "4:2:0")
    speed = info.get("speed", 6)
    max_threads = info.get("max_threads", _get_default_max_threads())
    codec = info.get("codec", "auto")
    if codec != "auto" and not _avif.encoder_codec_available(codec):
        msg = "Invalid saving codec"
        raise ValueError(msg)
    range_ = info.get("range", "full")
    tile_rows_log2 = info.get("tile_rows", 0)
    tile_cols_log2 = info.get("tile_cols", 0)
    alpha_premultiplied = bool(info.get("alpha_premultiplied", False))
    # Autotiling defaults on only when no explicit tiling was requested.
    autotiling = bool(info.get("autotiling", tile_rows_log2 == tile_cols_log2 == 0))

    icc_profile = info.get("icc_profile", im.info.get("icc_profile"))
    exif_orientation = 1
    if exif := info.get("exif"):
        if isinstance(exif, Image.Exif):
            exif_data = exif
        else:
            exif_data = Image.Exif()
            exif_data.load(exif)
        # Orientation is carried separately by the AVIF container, so it
        # is popped out of the EXIF payload before encoding.
        if ExifTags.Base.Orientation in exif_data:
            exif_orientation = exif_data.pop(ExifTags.Base.Orientation)
            exif = exif_data.tobytes() if exif_data else b""
        elif isinstance(exif, Image.Exif):
            exif = exif_data.tobytes()

    xmp = info.get("xmp")

    if isinstance(xmp, str):
        xmp = xmp.encode("utf-8")

    # Advanced codec options must normalize to key-value two-tuples.
    advanced = info.get("advanced")
    if advanced is not None:
        if isinstance(advanced, dict):
            advanced = advanced.items()
        try:
            advanced = tuple(advanced)
        except TypeError:
            invalid = True
        else:
            invalid = any(not isinstance(v, tuple) or len(v) != 2 for v in advanced)
        if invalid:
            msg = (
                "advanced codec options must be a dict of key-value string "
                "pairs or a series of key-value two-tuples"
            )
            raise ValueError(msg)

    # Setup the AVIF encoder
    enc = _avif.AvifEncoder(
        im.size,
        subsampling,
        quality,
        speed,
        max_threads,
        codec,
        range_,
        tile_rows_log2,
        tile_cols_log2,
        alpha_premultiplied,
        autotiling,
        icc_profile or b"",
        exif or b"",
        exif_orientation,
        xmp or b"",
        advanced,
    )

    # Add each frame
    frame_idx = 0
    frame_duration = 0
    cur_idx = im.tell()
    is_single_frame = total == 1
    try:
        for ims in [im] + append_images:
            # Get number of frames in this image
            nfr = getattr(ims, "n_frames", 1)

            for idx in range(nfr):
                ims.seek(idx)

                # Make sure image mode is supported
                frame = ims
                rawmode = ims.mode
                if ims.mode not in {"RGB", "RGBA"}:
                    rawmode = "RGBA" if ims.has_transparency_data else "RGB"
                    frame = ims.convert(rawmode)

                # Update frame duration
                if isinstance(duration, (list, tuple)):
                    frame_duration = duration[frame_idx]
                else:
                    frame_duration = duration

                # Append the frame to the animation encoder
                enc.add(
                    frame.tobytes("raw", rawmode),
                    frame_duration,
                    frame.size,
                    rawmode,
                    is_single_frame,
                )

                # Update frame index
                frame_idx += 1

                if not save_all:
                    break

    finally:
        # Restore the caller's frame position even if encoding failed.
        im.seek(cur_idx)

    # Get the final output from the encoder
    data = enc.finish()
    if data is None:
        msg = "cannot write file as AVIF (encoder returned None)"
        raise OSError(msg)

    fp.write(data)
|
||||
|
||||
|
||||
# Opening is always registered so a helpful message can be produced even
# when the native _avif module is missing; saving requires the encoder.
Image.register_open(AvifImageFile.format, AvifImageFile, _accept)
if SUPPORTED:
    Image.register_save(AvifImageFile.format, _save)
    Image.register_save_all(AvifImageFile.format, _save_all)
Image.register_extensions(AvifImageFile.format, [".avif", ".avifs"])
Image.register_mime(AvifImageFile.format, "image/avif")
|
||||
122
venv/lib/python3.11/site-packages/PIL/BdfFontFile.py
Normal file
122
venv/lib/python3.11/site-packages/PIL/BdfFontFile.py
Normal file
@ -0,0 +1,122 @@
|
||||
#
|
||||
# The Python Imaging Library
|
||||
# $Id$
|
||||
#
|
||||
# bitmap distribution font (bdf) file parser
|
||||
#
|
||||
# history:
|
||||
# 1996-05-16 fl created (as bdf2pil)
|
||||
# 1997-08-25 fl converted to FontFile driver
|
||||
# 2001-05-25 fl removed bogus __init__ call
|
||||
# 2002-11-20 fl robustification (from Kevin Cazabon, Dmitry Vasiliev)
|
||||
# 2003-04-22 fl more robustification (from Graham Dumpleton)
|
||||
#
|
||||
# Copyright (c) 1997-2003 by Secret Labs AB.
|
||||
# Copyright (c) 1997-2003 by Fredrik Lundh.
|
||||
#
|
||||
# See the README file for information on usage and redistribution.
|
||||
#
|
||||
|
||||
"""
|
||||
Parse X Bitmap Distribution Format (BDF)
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import BinaryIO
|
||||
|
||||
from . import FontFile, Image
|
||||
|
||||
|
||||
def bdf_char(
    f: BinaryIO,
) -> (
    tuple[
        str,
        int,
        tuple[tuple[int, int], tuple[int, int, int, int], tuple[int, int, int, int]],
        Image.Image,
    ]
    | None
):
    """Read one glyph record from a BDF stream.

    Returns (glyph name, encoding, (dwidth, dst bbox, src bbox), image),
    or None at end of file.
    """
    # skip to STARTCHAR
    while True:
        s = f.readline()
        if not s:
            return None
        if s.startswith(b"STARTCHAR"):
            break
    id = s[9:].strip().decode("ascii")

    # load symbol properties
    props = {}
    while True:
        s = f.readline()
        if not s or s.startswith(b"BITMAP"):
            break
        i = s.find(b" ")
        props[s[:i].decode("ascii")] = s[i + 1 : -1].decode("ascii")

    # load bitmap (hex rows, concatenated without the trailing newline)
    bitmap = bytearray()
    while True:
        s = f.readline()
        if not s or s.startswith(b"ENDCHAR"):
            break
        bitmap += s[:-1]

    # The word BBX
    # followed by the width in x (BBw), height in y (BBh),
    # and x and y displacement (BBxoff0, BByoff0)
    # of the lower left corner from the origin of the character.
    width, height, x_disp, y_disp = (int(p) for p in props["BBX"].split())

    # The word DWIDTH
    # followed by the width in x and y of the character in device pixels.
    dwx, dwy = (int(p) for p in props["DWIDTH"].split())

    bbox = (
        (dwx, dwy),
        (x_disp, -y_disp - height, width + x_disp, -y_disp),
        (0, 0, width, height),
    )

    try:
        im = Image.frombytes("1", (width, height), bitmap, "hex", "1")
    except ValueError:
        # deal with zero-width characters
        im = Image.new("1", (width, height))

    return id, int(props["ENCODING"]), bbox, im
|
||||
|
||||
|
||||
class BdfFontFile(FontFile.FontFile):
    """Font file plugin for the X11 BDF format."""

    def __init__(self, fp: BinaryIO) -> None:
        super().__init__()

        s = fp.readline()
        # Only BDF version 2.1 files are accepted.
        if not s.startswith(b"STARTFONT 2.1"):
            msg = "not a valid BDF file"
            raise SyntaxError(msg)

        props = {}
        comments = []

        # Read the global property section up to ENDPROPERTIES.
        while True:
            s = fp.readline()
            if not s or s.startswith(b"ENDPROPERTIES"):
                break
            i = s.find(b" ")
            props[s[:i].decode("ascii")] = s[i + 1 : -1].decode("ascii")
            if s[:i] in [b"COMMENT", b"COPYRIGHT"]:
                if s.find(b"LogicalFontDescription") < 0:
                    comments.append(s[i + 1 : -1].decode("ascii"))

        # Read glyphs until bdf_char() signals end of file; only encodings
        # that fit the glyph table are kept.
        while True:
            c = bdf_char(fp)
            if not c:
                break
            id, ch, (xy, dst, src), im = c
            if 0 <= ch < len(self.glyph):
                self.glyph[ch] = xy, dst, src, im
|
||||
498
venv/lib/python3.11/site-packages/PIL/BlpImagePlugin.py
Normal file
498
venv/lib/python3.11/site-packages/PIL/BlpImagePlugin.py
Normal file
@ -0,0 +1,498 @@
|
||||
"""
|
||||
Blizzard Mipmap Format (.blp)
|
||||
Jerome Leclanche <jerome@leclan.ch>
|
||||
|
||||
The contents of this file are hereby released in the public domain (CC0)
|
||||
Full text of the CC0 license:
|
||||
https://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
BLP1 files, used mostly in Warcraft III, are not fully supported.
|
||||
All types of BLP2 files used in World of Warcraft are supported.
|
||||
|
||||
The BLP file structure consists of a header, up to 16 mipmaps of the
|
||||
texture
|
||||
|
||||
Texture sizes must be powers of two, though the two dimensions do
|
||||
not have to be equal; 512x256 is valid, but 512x200 is not.
|
||||
The first mipmap (mipmap #0) is the full size image; each subsequent
|
||||
mipmap halves both dimensions. The final mipmap should be 1x1.
|
||||
|
||||
BLP files come in many different flavours:
|
||||
* JPEG-compressed (type == 0) - only supported for BLP1.
|
||||
* RAW images (type == 1, encoding == 1). Each mipmap is stored as an
|
||||
array of 8-bit values, one per pixel, left to right, top to bottom.
|
||||
Each value is an index to the palette.
|
||||
* DXT-compressed (type == 1, encoding == 2):
|
||||
- DXT1 compression is used if alpha_encoding == 0.
|
||||
- An additional alpha bit is used if alpha_depth == 1.
|
||||
- DXT3 compression is used if alpha_encoding == 1.
|
||||
- DXT5 compression is used if alpha_encoding == 7.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import abc
|
||||
import os
|
||||
import struct
|
||||
from enum import IntEnum
|
||||
from io import BytesIO
|
||||
from typing import IO
|
||||
|
||||
from . import Image, ImageFile
|
||||
|
||||
|
||||
class Format(IntEnum):
    # BLP type field value 0 (JPEG-compressed; only supported for BLP1).
    JPEG = 0
|
||||
|
||||
|
||||
class Encoding(IntEnum):
    # BLP encoding field values (see the module docstring).
    UNCOMPRESSED = 1
    DXT = 2
    UNCOMPRESSED_RAW_BGRA = 3
|
||||
|
||||
|
||||
class AlphaEncoding(IntEnum):
    # alpha_encoding field values selecting the DXT variant.
    DXT1 = 0
    DXT3 = 1
    DXT5 = 7
|
||||
|
||||
|
||||
def unpack_565(i: int) -> tuple[int, int, int]:
    """Expand a packed RGB565 value into an (r, g, b) tuple of 8-bit values."""
    r = (i >> 11) & 0x1F
    g = (i >> 5) & 0x3F
    b = i & 0x1F
    # Shift each channel up to the top of the 8-bit range.
    return r << 3, g << 2, b << 3
|
||||
|
||||
|
||||
def decode_dxt1(
    data: bytes, alpha: bool = False
) -> tuple[bytearray, bytearray, bytearray, bytearray]:
    """
    input: one "row" of data (i.e. will produce 4*width pixels)
    """
    rows = (bytearray(), bytearray(), bytearray(), bytearray())

    # Each 8-byte block encodes a 4x4 tile: two RGB565 endpoint colors
    # followed by 32 bits of 2-bit per-pixel selector codes.
    for offset in range(0, len(data) // 8 * 8, 8):
        color0, color1, bits = struct.unpack_from("<HHI", data, offset)

        # Expand the RGB565 endpoints to 8-bit channels (unpack_565, inlined).
        r0, g0, b0 = (
            ((color0 >> 11) & 0x1F) << 3,
            ((color0 >> 5) & 0x3F) << 2,
            (color0 & 0x1F) << 3,
        )
        r1, g1, b1 = (
            ((color1 >> 11) & 0x1F) << 3,
            ((color1 >> 5) & 0x3F) << 2,
            (color1 & 0x1F) << 3,
        )

        # color0 > color1 selects the 4-color (opaque) block mode.
        opaque = color0 > color1
        for row in rows:
            for _ in range(4):
                control = bits & 3
                bits >>= 2

                a = 0xFF
                if control == 0:
                    r, g, b = r0, g0, b0
                elif control == 1:
                    r, g, b = r1, g1, b1
                elif control == 2:
                    if opaque:
                        r = (2 * r0 + r1) // 3
                        g = (2 * g0 + g1) // 3
                        b = (2 * b0 + b1) // 3
                    else:
                        r = (r0 + r1) // 2
                        g = (g0 + g1) // 2
                        b = (b0 + b1) // 2
                else:
                    if opaque:
                        r = (2 * r1 + r0) // 3
                        g = (2 * g1 + g0) // 3
                        b = (2 * b1 + b0) // 3
                    else:
                        # 3-color mode: code 3 is fully transparent black.
                        r, g, b, a = 0, 0, 0, 0

                row.extend((r, g, b, a) if alpha else (r, g, b))

    return rows
|
||||
|
||||
|
||||
def decode_dxt3(data: bytes) -> tuple[bytearray, bytearray, bytearray, bytearray]:
    """
    input: one "row" of data (i.e. will produce 4*width pixels)

    Each 16-byte block holds 8 bytes of 4-bit explicit alpha values
    followed by a DXT1-style color block; output is RGBA.
    """

    blocks = len(data) // 16  # number of blocks in row
    ret = (bytearray(), bytearray(), bytearray(), bytearray())

    for block_index in range(blocks):
        idx = block_index * 16
        block = data[idx : idx + 16]
        # Decode next 16-byte block.
        bits = struct.unpack_from("<8B", block)
        color0, color1 = struct.unpack_from("<HH", block, 8)

        (code,) = struct.unpack_from("<I", block, 12)

        r0, g0, b0 = unpack_565(color0)
        r1, g1, b1 = unpack_565(color1)

        for j in range(4):
            high = False  # Do we want the higher bits?
            for i in range(4):
                # Two 4-bit alpha values are packed per byte.
                alphacode_index = (4 * j + i) // 2
                a = bits[alphacode_index]
                if high:
                    high = False
                    a >>= 4
                else:
                    high = True
                    a &= 0xF
                a *= 17  # We get a value between 0 and 15

                color_code = (code >> 2 * (4 * j + i)) & 0x03

                if color_code == 0:
                    r, g, b = r0, g0, b0
                elif color_code == 1:
                    r, g, b = r1, g1, b1
                elif color_code == 2:
                    r = (2 * r0 + r1) // 3
                    g = (2 * g0 + g1) // 3
                    b = (2 * b0 + b1) // 3
                elif color_code == 3:
                    r = (2 * r1 + r0) // 3
                    g = (2 * g1 + g0) // 3
                    b = (2 * b1 + b0) // 3

                ret[j].extend([r, g, b, a])

    return ret
|
||||
|
||||
|
||||
def decode_dxt5(data: bytes) -> tuple[bytearray, bytearray, bytearray, bytearray]:
|
||||
"""
|
||||
input: one "row" of data (i.e. will produce 4 * width pixels)
|
||||
"""
|
||||
|
||||
blocks = len(data) // 16 # number of blocks in row
|
||||
ret = (bytearray(), bytearray(), bytearray(), bytearray())
|
||||
|
||||
for block_index in range(blocks):
|
||||
idx = block_index * 16
|
||||
block = data[idx : idx + 16]
|
||||
# Decode next 16-byte block.
|
||||
a0, a1 = struct.unpack_from("<BB", block)
|
||||
|
||||
bits = struct.unpack_from("<6B", block, 2)
|
||||
alphacode1 = bits[2] | (bits[3] << 8) | (bits[4] << 16) | (bits[5] << 24)
|
||||
alphacode2 = bits[0] | (bits[1] << 8)
|
||||
|
||||
color0, color1 = struct.unpack_from("<HH", block, 8)
|
||||
|
||||
(code,) = struct.unpack_from("<I", block, 12)
|
||||
|
||||
r0, g0, b0 = unpack_565(color0)
|
||||
r1, g1, b1 = unpack_565(color1)
|
||||
|
||||
for j in range(4):
|
||||
for i in range(4):
|
||||
# get next control op and generate a pixel
|
||||
alphacode_index = 3 * (4 * j + i)
|
||||
|
||||
if alphacode_index <= 12:
|
||||
alphacode = (alphacode2 >> alphacode_index) & 0x07
|
||||
elif alphacode_index == 15:
|
||||
alphacode = (alphacode2 >> 15) | ((alphacode1 << 1) & 0x06)
|
||||
else: # alphacode_index >= 18 and alphacode_index <= 45
|
||||
alphacode = (alphacode1 >> (alphacode_index - 16)) & 0x07
|
||||
|
||||
if alphacode == 0:
|
||||
a = a0
|
||||
elif alphacode == 1:
|
||||
a = a1
|
||||
elif a0 > a1:
|
||||
a = ((8 - alphacode) * a0 + (alphacode - 1) * a1) // 7
|
||||
elif alphacode == 6:
|
||||
a = 0
|
||||
elif alphacode == 7:
|
||||
a = 255
|
||||
else:
|
||||
a = ((6 - alphacode) * a0 + (alphacode - 1) * a1) // 5
|
||||
|
||||
color_code = (code >> 2 * (4 * j + i)) & 0x03
|
||||
|
||||
if color_code == 0:
|
||||
r, g, b = r0, g0, b0
|
||||
elif color_code == 1:
|
||||
r, g, b = r1, g1, b1
|
||||
elif color_code == 2:
|
||||
r = (2 * r0 + r1) // 3
|
||||
g = (2 * g0 + g1) // 3
|
||||
b = (2 * b0 + b1) // 3
|
||||
elif color_code == 3:
|
||||
r = (2 * r1 + r0) // 3
|
||||
g = (2 * g1 + g0) // 3
|
||||
b = (2 * b1 + b0) // 3
|
||||
|
||||
ret[j].extend([r, g, b, a])
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
class BLPFormatError(NotImplementedError):
|
||||
pass
|
||||
|
||||
|
||||
def _accept(prefix: bytes) -> bool:
|
||||
return prefix.startswith((b"BLP1", b"BLP2"))
|
||||
|
||||
|
||||
class BlpImageFile(ImageFile.ImageFile):
|
||||
"""
|
||||
Blizzard Mipmap Format
|
||||
"""
|
||||
|
||||
format = "BLP"
|
||||
format_description = "Blizzard Mipmap Format"
|
||||
|
||||
def _open(self) -> None:
|
||||
assert self.fp is not None
|
||||
self.magic = self.fp.read(4)
|
||||
if not _accept(self.magic):
|
||||
msg = f"Bad BLP magic {repr(self.magic)}"
|
||||
raise BLPFormatError(msg)
|
||||
|
||||
compression = struct.unpack("<i", self.fp.read(4))[0]
|
||||
if self.magic == b"BLP1":
|
||||
alpha = struct.unpack("<I", self.fp.read(4))[0] != 0
|
||||
else:
|
||||
encoding = struct.unpack("<b", self.fp.read(1))[0]
|
||||
alpha = struct.unpack("<b", self.fp.read(1))[0] != 0
|
||||
alpha_encoding = struct.unpack("<b", self.fp.read(1))[0]
|
||||
self.fp.seek(1, os.SEEK_CUR) # mips
|
||||
|
||||
self._size = struct.unpack("<II", self.fp.read(8))
|
||||
|
||||
args: tuple[int, int, bool] | tuple[int, int, bool, int]
|
||||
if self.magic == b"BLP1":
|
||||
encoding = struct.unpack("<i", self.fp.read(4))[0]
|
||||
self.fp.seek(4, os.SEEK_CUR) # subtype
|
||||
|
||||
args = (compression, encoding, alpha)
|
||||
offset = 28
|
||||
else:
|
||||
args = (compression, encoding, alpha, alpha_encoding)
|
||||
offset = 20
|
||||
|
||||
decoder = self.magic.decode()
|
||||
|
||||
self._mode = "RGBA" if alpha else "RGB"
|
||||
self.tile = [ImageFile._Tile(decoder, (0, 0) + self.size, offset, args)]
|
||||
|
||||
|
||||
class _BLPBaseDecoder(abc.ABC, ImageFile.PyDecoder):
|
||||
_pulls_fd = True
|
||||
|
||||
def decode(self, buffer: bytes | Image.SupportsArrayInterface) -> tuple[int, int]:
|
||||
try:
|
||||
self._read_header()
|
||||
self._load()
|
||||
except struct.error as e:
|
||||
msg = "Truncated BLP file"
|
||||
raise OSError(msg) from e
|
||||
return -1, 0
|
||||
|
||||
@abc.abstractmethod
|
||||
def _load(self) -> None:
|
||||
pass
|
||||
|
||||
def _read_header(self) -> None:
|
||||
self._offsets = struct.unpack("<16I", self._safe_read(16 * 4))
|
||||
self._lengths = struct.unpack("<16I", self._safe_read(16 * 4))
|
||||
|
||||
def _safe_read(self, length: int) -> bytes:
|
||||
assert self.fd is not None
|
||||
return ImageFile._safe_read(self.fd, length)
|
||||
|
||||
def _read_palette(self) -> list[tuple[int, int, int, int]]:
|
||||
ret = []
|
||||
for i in range(256):
|
||||
try:
|
||||
b, g, r, a = struct.unpack("<4B", self._safe_read(4))
|
||||
except struct.error:
|
||||
break
|
||||
ret.append((b, g, r, a))
|
||||
return ret
|
||||
|
||||
def _read_bgra(
|
||||
self, palette: list[tuple[int, int, int, int]], alpha: bool
|
||||
) -> bytearray:
|
||||
data = bytearray()
|
||||
_data = BytesIO(self._safe_read(self._lengths[0]))
|
||||
while True:
|
||||
try:
|
||||
(offset,) = struct.unpack("<B", _data.read(1))
|
||||
except struct.error:
|
||||
break
|
||||
b, g, r, a = palette[offset]
|
||||
d: tuple[int, ...] = (r, g, b)
|
||||
if alpha:
|
||||
d += (a,)
|
||||
data.extend(d)
|
||||
return data
|
||||
|
||||
|
||||
class BLP1Decoder(_BLPBaseDecoder):
|
||||
def _load(self) -> None:
|
||||
self._compression, self._encoding, alpha = self.args
|
||||
|
||||
if self._compression == Format.JPEG:
|
||||
self._decode_jpeg_stream()
|
||||
|
||||
elif self._compression == 1:
|
||||
if self._encoding in (4, 5):
|
||||
palette = self._read_palette()
|
||||
data = self._read_bgra(palette, alpha)
|
||||
self.set_as_raw(data)
|
||||
else:
|
||||
msg = f"Unsupported BLP encoding {repr(self._encoding)}"
|
||||
raise BLPFormatError(msg)
|
||||
else:
|
||||
msg = f"Unsupported BLP compression {repr(self._encoding)}"
|
||||
raise BLPFormatError(msg)
|
||||
|
||||
def _decode_jpeg_stream(self) -> None:
|
||||
from .JpegImagePlugin import JpegImageFile
|
||||
|
||||
(jpeg_header_size,) = struct.unpack("<I", self._safe_read(4))
|
||||
jpeg_header = self._safe_read(jpeg_header_size)
|
||||
assert self.fd is not None
|
||||
self._safe_read(self._offsets[0] - self.fd.tell()) # What IS this?
|
||||
data = self._safe_read(self._lengths[0])
|
||||
data = jpeg_header + data
|
||||
image = JpegImageFile(BytesIO(data))
|
||||
Image._decompression_bomb_check(image.size)
|
||||
if image.mode == "CMYK":
|
||||
args = image.tile[0].args
|
||||
assert isinstance(args, tuple)
|
||||
image.tile = [image.tile[0]._replace(args=(args[0], "CMYK"))]
|
||||
self.set_as_raw(image.convert("RGB").tobytes(), "BGR")
|
||||
|
||||
|
||||
class BLP2Decoder(_BLPBaseDecoder):
|
||||
def _load(self) -> None:
|
||||
self._compression, self._encoding, alpha, self._alpha_encoding = self.args
|
||||
|
||||
palette = self._read_palette()
|
||||
|
||||
assert self.fd is not None
|
||||
self.fd.seek(self._offsets[0])
|
||||
|
||||
if self._compression == 1:
|
||||
# Uncompressed or DirectX compression
|
||||
|
||||
if self._encoding == Encoding.UNCOMPRESSED:
|
||||
data = self._read_bgra(palette, alpha)
|
||||
|
||||
elif self._encoding == Encoding.DXT:
|
||||
data = bytearray()
|
||||
if self._alpha_encoding == AlphaEncoding.DXT1:
|
||||
linesize = (self.state.xsize + 3) // 4 * 8
|
||||
for yb in range((self.state.ysize + 3) // 4):
|
||||
for d in decode_dxt1(self._safe_read(linesize), alpha):
|
||||
data += d
|
||||
|
||||
elif self._alpha_encoding == AlphaEncoding.DXT3:
|
||||
linesize = (self.state.xsize + 3) // 4 * 16
|
||||
for yb in range((self.state.ysize + 3) // 4):
|
||||
for d in decode_dxt3(self._safe_read(linesize)):
|
||||
data += d
|
||||
|
||||
elif self._alpha_encoding == AlphaEncoding.DXT5:
|
||||
linesize = (self.state.xsize + 3) // 4 * 16
|
||||
for yb in range((self.state.ysize + 3) // 4):
|
||||
for d in decode_dxt5(self._safe_read(linesize)):
|
||||
data += d
|
||||
else:
|
||||
msg = f"Unsupported alpha encoding {repr(self._alpha_encoding)}"
|
||||
raise BLPFormatError(msg)
|
||||
else:
|
||||
msg = f"Unknown BLP encoding {repr(self._encoding)}"
|
||||
raise BLPFormatError(msg)
|
||||
|
||||
else:
|
||||
msg = f"Unknown BLP compression {repr(self._compression)}"
|
||||
raise BLPFormatError(msg)
|
||||
|
||||
self.set_as_raw(data)
|
||||
|
||||
|
||||
class BLPEncoder(ImageFile.PyEncoder):
|
||||
_pushes_fd = True
|
||||
|
||||
def _write_palette(self) -> bytes:
|
||||
data = b""
|
||||
assert self.im is not None
|
||||
palette = self.im.getpalette("RGBA", "RGBA")
|
||||
for i in range(len(palette) // 4):
|
||||
r, g, b, a = palette[i * 4 : (i + 1) * 4]
|
||||
data += struct.pack("<4B", b, g, r, a)
|
||||
while len(data) < 256 * 4:
|
||||
data += b"\x00" * 4
|
||||
return data
|
||||
|
||||
def encode(self, bufsize: int) -> tuple[int, int, bytes]:
|
||||
palette_data = self._write_palette()
|
||||
|
||||
offset = 20 + 16 * 4 * 2 + len(palette_data)
|
||||
data = struct.pack("<16I", offset, *((0,) * 15))
|
||||
|
||||
assert self.im is not None
|
||||
w, h = self.im.size
|
||||
data += struct.pack("<16I", w * h, *((0,) * 15))
|
||||
|
||||
data += palette_data
|
||||
|
||||
for y in range(h):
|
||||
for x in range(w):
|
||||
data += struct.pack("<B", self.im.getpixel((x, y)))
|
||||
|
||||
return len(data), 0, data
|
||||
|
||||
|
||||
def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
|
||||
if im.mode != "P":
|
||||
msg = "Unsupported BLP image mode"
|
||||
raise ValueError(msg)
|
||||
|
||||
magic = b"BLP1" if im.encoderinfo.get("blp_version") == "BLP1" else b"BLP2"
|
||||
fp.write(magic)
|
||||
|
||||
assert im.palette is not None
|
||||
fp.write(struct.pack("<i", 1)) # Uncompressed or DirectX compression
|
||||
|
||||
alpha_depth = 1 if im.palette.mode == "RGBA" else 0
|
||||
if magic == b"BLP1":
|
||||
fp.write(struct.pack("<L", alpha_depth))
|
||||
else:
|
||||
fp.write(struct.pack("<b", Encoding.UNCOMPRESSED))
|
||||
fp.write(struct.pack("<b", alpha_depth))
|
||||
fp.write(struct.pack("<b", 0)) # alpha encoding
|
||||
fp.write(struct.pack("<b", 0)) # mips
|
||||
fp.write(struct.pack("<II", *im.size))
|
||||
if magic == b"BLP1":
|
||||
fp.write(struct.pack("<i", 5))
|
||||
fp.write(struct.pack("<i", 0))
|
||||
|
||||
ImageFile._save(im, fp, [ImageFile._Tile("BLP", (0, 0) + im.size, 0, im.mode)])
|
||||
|
||||
|
||||
Image.register_open(BlpImageFile.format, BlpImageFile, _accept)
|
||||
Image.register_extension(BlpImageFile.format, ".blp")
|
||||
Image.register_decoder("BLP1", BLP1Decoder)
|
||||
Image.register_decoder("BLP2", BLP2Decoder)
|
||||
|
||||
Image.register_save(BlpImageFile.format, _save)
|
||||
Image.register_encoder("BLP", BLPEncoder)
|
||||
517
venv/lib/python3.11/site-packages/PIL/BmpImagePlugin.py
Normal file
517
venv/lib/python3.11/site-packages/PIL/BmpImagePlugin.py
Normal file
@ -0,0 +1,517 @@
|
||||
#
|
||||
# The Python Imaging Library.
|
||||
# $Id$
|
||||
#
|
||||
# BMP file handler
|
||||
#
|
||||
# Windows (and OS/2) native bitmap storage format.
|
||||
#
|
||||
# history:
|
||||
# 1995-09-01 fl Created
|
||||
# 1996-04-30 fl Added save
|
||||
# 1997-08-27 fl Fixed save of 1-bit images
|
||||
# 1998-03-06 fl Load P images as L where possible
|
||||
# 1998-07-03 fl Load P images as 1 where possible
|
||||
# 1998-12-29 fl Handle small palettes
|
||||
# 2002-12-30 fl Fixed load of 1-bit palette images
|
||||
# 2003-04-21 fl Fixed load of 1-bit monochrome images
|
||||
# 2003-04-23 fl Added limited support for BI_BITFIELDS compression
|
||||
#
|
||||
# Copyright (c) 1997-2003 by Secret Labs AB
|
||||
# Copyright (c) 1995-2003 by Fredrik Lundh
|
||||
#
|
||||
# See the README file for information on usage and redistribution.
|
||||
#
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from typing import IO, Any
|
||||
|
||||
from . import Image, ImageFile, ImagePalette
|
||||
from ._binary import i16le as i16
|
||||
from ._binary import i32le as i32
|
||||
from ._binary import o8
|
||||
from ._binary import o16le as o16
|
||||
from ._binary import o32le as o32
|
||||
|
||||
#
|
||||
# --------------------------------------------------------------------
|
||||
# Read BMP file
|
||||
|
||||
BIT2MODE = {
|
||||
# bits => mode, rawmode
|
||||
1: ("P", "P;1"),
|
||||
4: ("P", "P;4"),
|
||||
8: ("P", "P"),
|
||||
16: ("RGB", "BGR;15"),
|
||||
24: ("RGB", "BGR"),
|
||||
32: ("RGB", "BGRX"),
|
||||
}
|
||||
|
||||
USE_RAW_ALPHA = False
|
||||
|
||||
|
||||
def _accept(prefix: bytes) -> bool:
|
||||
return prefix.startswith(b"BM")
|
||||
|
||||
|
||||
def _dib_accept(prefix: bytes) -> bool:
|
||||
return i32(prefix) in [12, 40, 52, 56, 64, 108, 124]
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Image plugin for the Windows BMP format.
|
||||
# =============================================================================
|
||||
class BmpImageFile(ImageFile.ImageFile):
|
||||
"""Image plugin for the Windows Bitmap format (BMP)"""
|
||||
|
||||
# ------------------------------------------------------------- Description
|
||||
format_description = "Windows Bitmap"
|
||||
format = "BMP"
|
||||
|
||||
# -------------------------------------------------- BMP Compression values
|
||||
COMPRESSIONS = {"RAW": 0, "RLE8": 1, "RLE4": 2, "BITFIELDS": 3, "JPEG": 4, "PNG": 5}
|
||||
for k, v in COMPRESSIONS.items():
|
||||
vars()[k] = v
|
||||
|
||||
def _bitmap(self, header: int = 0, offset: int = 0) -> None:
|
||||
"""Read relevant info about the BMP"""
|
||||
assert self.fp is not None
|
||||
read, seek = self.fp.read, self.fp.seek
|
||||
if header:
|
||||
seek(header)
|
||||
# read bmp header size @offset 14 (this is part of the header size)
|
||||
file_info: dict[str, bool | int | tuple[int, ...]] = {
|
||||
"header_size": i32(read(4)),
|
||||
"direction": -1,
|
||||
}
|
||||
|
||||
# -------------------- If requested, read header at a specific position
|
||||
# read the rest of the bmp header, without its size
|
||||
assert isinstance(file_info["header_size"], int)
|
||||
header_data = ImageFile._safe_read(self.fp, file_info["header_size"] - 4)
|
||||
|
||||
# ------------------------------- Windows Bitmap v2, IBM OS/2 Bitmap v1
|
||||
# ----- This format has different offsets because of width/height types
|
||||
# 12: BITMAPCOREHEADER/OS21XBITMAPHEADER
|
||||
if file_info["header_size"] == 12:
|
||||
file_info["width"] = i16(header_data, 0)
|
||||
file_info["height"] = i16(header_data, 2)
|
||||
file_info["planes"] = i16(header_data, 4)
|
||||
file_info["bits"] = i16(header_data, 6)
|
||||
file_info["compression"] = self.COMPRESSIONS["RAW"]
|
||||
file_info["palette_padding"] = 3
|
||||
|
||||
# --------------------------------------------- Windows Bitmap v3 to v5
|
||||
# 40: BITMAPINFOHEADER
|
||||
# 52: BITMAPV2HEADER
|
||||
# 56: BITMAPV3HEADER
|
||||
# 64: BITMAPCOREHEADER2/OS22XBITMAPHEADER
|
||||
# 108: BITMAPV4HEADER
|
||||
# 124: BITMAPV5HEADER
|
||||
elif file_info["header_size"] in (40, 52, 56, 64, 108, 124):
|
||||
file_info["y_flip"] = header_data[7] == 0xFF
|
||||
file_info["direction"] = 1 if file_info["y_flip"] else -1
|
||||
file_info["width"] = i32(header_data, 0)
|
||||
file_info["height"] = (
|
||||
i32(header_data, 4)
|
||||
if not file_info["y_flip"]
|
||||
else 2**32 - i32(header_data, 4)
|
||||
)
|
||||
file_info["planes"] = i16(header_data, 8)
|
||||
file_info["bits"] = i16(header_data, 10)
|
||||
file_info["compression"] = i32(header_data, 12)
|
||||
# byte size of pixel data
|
||||
file_info["data_size"] = i32(header_data, 16)
|
||||
file_info["pixels_per_meter"] = (
|
||||
i32(header_data, 20),
|
||||
i32(header_data, 24),
|
||||
)
|
||||
file_info["colors"] = i32(header_data, 28)
|
||||
file_info["palette_padding"] = 4
|
||||
assert isinstance(file_info["pixels_per_meter"], tuple)
|
||||
self.info["dpi"] = tuple(x / 39.3701 for x in file_info["pixels_per_meter"])
|
||||
if file_info["compression"] == self.COMPRESSIONS["BITFIELDS"]:
|
||||
masks = ["r_mask", "g_mask", "b_mask"]
|
||||
if len(header_data) >= 48:
|
||||
if len(header_data) >= 52:
|
||||
masks.append("a_mask")
|
||||
else:
|
||||
file_info["a_mask"] = 0x0
|
||||
for idx, mask in enumerate(masks):
|
||||
file_info[mask] = i32(header_data, 36 + idx * 4)
|
||||
else:
|
||||
# 40 byte headers only have the three components in the
|
||||
# bitfields masks, ref:
|
||||
# https://msdn.microsoft.com/en-us/library/windows/desktop/dd183376(v=vs.85).aspx
|
||||
# See also
|
||||
# https://github.com/python-pillow/Pillow/issues/1293
|
||||
# There is a 4th component in the RGBQuad, in the alpha
|
||||
# location, but it is listed as a reserved component,
|
||||
# and it is not generally an alpha channel
|
||||
file_info["a_mask"] = 0x0
|
||||
for mask in masks:
|
||||
file_info[mask] = i32(read(4))
|
||||
assert isinstance(file_info["r_mask"], int)
|
||||
assert isinstance(file_info["g_mask"], int)
|
||||
assert isinstance(file_info["b_mask"], int)
|
||||
assert isinstance(file_info["a_mask"], int)
|
||||
file_info["rgb_mask"] = (
|
||||
file_info["r_mask"],
|
||||
file_info["g_mask"],
|
||||
file_info["b_mask"],
|
||||
)
|
||||
file_info["rgba_mask"] = (
|
||||
file_info["r_mask"],
|
||||
file_info["g_mask"],
|
||||
file_info["b_mask"],
|
||||
file_info["a_mask"],
|
||||
)
|
||||
else:
|
||||
msg = f"Unsupported BMP header type ({file_info['header_size']})"
|
||||
raise OSError(msg)
|
||||
|
||||
# ------------------ Special case : header is reported 40, which
|
||||
# ---------------------- is shorter than real size for bpp >= 16
|
||||
assert isinstance(file_info["width"], int)
|
||||
assert isinstance(file_info["height"], int)
|
||||
self._size = file_info["width"], file_info["height"]
|
||||
|
||||
# ------- If color count was not found in the header, compute from bits
|
||||
assert isinstance(file_info["bits"], int)
|
||||
file_info["colors"] = (
|
||||
file_info["colors"]
|
||||
if file_info.get("colors", 0)
|
||||
else (1 << file_info["bits"])
|
||||
)
|
||||
assert isinstance(file_info["colors"], int)
|
||||
if offset == 14 + file_info["header_size"] and file_info["bits"] <= 8:
|
||||
offset += 4 * file_info["colors"]
|
||||
|
||||
# ---------------------- Check bit depth for unusual unsupported values
|
||||
self._mode, raw_mode = BIT2MODE.get(file_info["bits"], ("", ""))
|
||||
if not self.mode:
|
||||
msg = f"Unsupported BMP pixel depth ({file_info['bits']})"
|
||||
raise OSError(msg)
|
||||
|
||||
# ---------------- Process BMP with Bitfields compression (not palette)
|
||||
decoder_name = "raw"
|
||||
if file_info["compression"] == self.COMPRESSIONS["BITFIELDS"]:
|
||||
SUPPORTED: dict[int, list[tuple[int, ...]]] = {
|
||||
32: [
|
||||
(0xFF0000, 0xFF00, 0xFF, 0x0),
|
||||
(0xFF000000, 0xFF0000, 0xFF00, 0x0),
|
||||
(0xFF000000, 0xFF00, 0xFF, 0x0),
|
||||
(0xFF000000, 0xFF0000, 0xFF00, 0xFF),
|
||||
(0xFF, 0xFF00, 0xFF0000, 0xFF000000),
|
||||
(0xFF0000, 0xFF00, 0xFF, 0xFF000000),
|
||||
(0xFF000000, 0xFF00, 0xFF, 0xFF0000),
|
||||
(0x0, 0x0, 0x0, 0x0),
|
||||
],
|
||||
24: [(0xFF0000, 0xFF00, 0xFF)],
|
||||
16: [(0xF800, 0x7E0, 0x1F), (0x7C00, 0x3E0, 0x1F)],
|
||||
}
|
||||
MASK_MODES = {
|
||||
(32, (0xFF0000, 0xFF00, 0xFF, 0x0)): "BGRX",
|
||||
(32, (0xFF000000, 0xFF0000, 0xFF00, 0x0)): "XBGR",
|
||||
(32, (0xFF000000, 0xFF00, 0xFF, 0x0)): "BGXR",
|
||||
(32, (0xFF000000, 0xFF0000, 0xFF00, 0xFF)): "ABGR",
|
||||
(32, (0xFF, 0xFF00, 0xFF0000, 0xFF000000)): "RGBA",
|
||||
(32, (0xFF0000, 0xFF00, 0xFF, 0xFF000000)): "BGRA",
|
||||
(32, (0xFF000000, 0xFF00, 0xFF, 0xFF0000)): "BGAR",
|
||||
(32, (0x0, 0x0, 0x0, 0x0)): "BGRA",
|
||||
(24, (0xFF0000, 0xFF00, 0xFF)): "BGR",
|
||||
(16, (0xF800, 0x7E0, 0x1F)): "BGR;16",
|
||||
(16, (0x7C00, 0x3E0, 0x1F)): "BGR;15",
|
||||
}
|
||||
if file_info["bits"] in SUPPORTED:
|
||||
if (
|
||||
file_info["bits"] == 32
|
||||
and file_info["rgba_mask"] in SUPPORTED[file_info["bits"]]
|
||||
):
|
||||
assert isinstance(file_info["rgba_mask"], tuple)
|
||||
raw_mode = MASK_MODES[(file_info["bits"], file_info["rgba_mask"])]
|
||||
self._mode = "RGBA" if "A" in raw_mode else self.mode
|
||||
elif (
|
||||
file_info["bits"] in (24, 16)
|
||||
and file_info["rgb_mask"] in SUPPORTED[file_info["bits"]]
|
||||
):
|
||||
assert isinstance(file_info["rgb_mask"], tuple)
|
||||
raw_mode = MASK_MODES[(file_info["bits"], file_info["rgb_mask"])]
|
||||
else:
|
||||
msg = "Unsupported BMP bitfields layout"
|
||||
raise OSError(msg)
|
||||
else:
|
||||
msg = "Unsupported BMP bitfields layout"
|
||||
raise OSError(msg)
|
||||
elif file_info["compression"] == self.COMPRESSIONS["RAW"]:
|
||||
if file_info["bits"] == 32 and (
|
||||
header == 22 or USE_RAW_ALPHA # 32-bit .cur offset
|
||||
):
|
||||
raw_mode, self._mode = "BGRA", "RGBA"
|
||||
elif file_info["compression"] in (
|
||||
self.COMPRESSIONS["RLE8"],
|
||||
self.COMPRESSIONS["RLE4"],
|
||||
):
|
||||
decoder_name = "bmp_rle"
|
||||
else:
|
||||
msg = f"Unsupported BMP compression ({file_info['compression']})"
|
||||
raise OSError(msg)
|
||||
|
||||
# --------------- Once the header is processed, process the palette/LUT
|
||||
if self.mode == "P": # Paletted for 1, 4 and 8 bit images
|
||||
# ---------------------------------------------------- 1-bit images
|
||||
if not (0 < file_info["colors"] <= 65536):
|
||||
msg = f"Unsupported BMP Palette size ({file_info['colors']})"
|
||||
raise OSError(msg)
|
||||
else:
|
||||
assert isinstance(file_info["palette_padding"], int)
|
||||
padding = file_info["palette_padding"]
|
||||
palette = read(padding * file_info["colors"])
|
||||
grayscale = True
|
||||
indices = (
|
||||
(0, 255)
|
||||
if file_info["colors"] == 2
|
||||
else list(range(file_info["colors"]))
|
||||
)
|
||||
|
||||
# ----------------- Check if grayscale and ignore palette if so
|
||||
for ind, val in enumerate(indices):
|
||||
rgb = palette[ind * padding : ind * padding + 3]
|
||||
if rgb != o8(val) * 3:
|
||||
grayscale = False
|
||||
|
||||
# ------- If all colors are gray, white or black, ditch palette
|
||||
if grayscale:
|
||||
self._mode = "1" if file_info["colors"] == 2 else "L"
|
||||
raw_mode = self.mode
|
||||
else:
|
||||
self._mode = "P"
|
||||
self.palette = ImagePalette.raw(
|
||||
"BGRX" if padding == 4 else "BGR", palette
|
||||
)
|
||||
|
||||
# ---------------------------- Finally set the tile data for the plugin
|
||||
self.info["compression"] = file_info["compression"]
|
||||
args: list[Any] = [raw_mode]
|
||||
if decoder_name == "bmp_rle":
|
||||
args.append(file_info["compression"] == self.COMPRESSIONS["RLE4"])
|
||||
else:
|
||||
assert isinstance(file_info["width"], int)
|
||||
args.append(((file_info["width"] * file_info["bits"] + 31) >> 3) & (~3))
|
||||
args.append(file_info["direction"])
|
||||
self.tile = [
|
||||
ImageFile._Tile(
|
||||
decoder_name,
|
||||
(0, 0, file_info["width"], file_info["height"]),
|
||||
offset or self.fp.tell(),
|
||||
tuple(args),
|
||||
)
|
||||
]
|
||||
|
||||
def _open(self) -> None:
|
||||
"""Open file, check magic number and read header"""
|
||||
# read 14 bytes: magic number, filesize, reserved, header final offset
|
||||
assert self.fp is not None
|
||||
head_data = self.fp.read(14)
|
||||
# choke if the file does not have the required magic bytes
|
||||
if not _accept(head_data):
|
||||
msg = "Not a BMP file"
|
||||
raise SyntaxError(msg)
|
||||
# read the start position of the BMP image data (u32)
|
||||
offset = i32(head_data, 10)
|
||||
# load bitmap information (offset=raster info)
|
||||
self._bitmap(offset=offset)
|
||||
|
||||
|
||||
class BmpRleDecoder(ImageFile.PyDecoder):
|
||||
_pulls_fd = True
|
||||
|
||||
def decode(self, buffer: bytes | Image.SupportsArrayInterface) -> tuple[int, int]:
|
||||
assert self.fd is not None
|
||||
rle4 = self.args[1]
|
||||
data = bytearray()
|
||||
x = 0
|
||||
dest_length = self.state.xsize * self.state.ysize
|
||||
while len(data) < dest_length:
|
||||
pixels = self.fd.read(1)
|
||||
byte = self.fd.read(1)
|
||||
if not pixels or not byte:
|
||||
break
|
||||
num_pixels = pixels[0]
|
||||
if num_pixels:
|
||||
# encoded mode
|
||||
if x + num_pixels > self.state.xsize:
|
||||
# Too much data for row
|
||||
num_pixels = max(0, self.state.xsize - x)
|
||||
if rle4:
|
||||
first_pixel = o8(byte[0] >> 4)
|
||||
second_pixel = o8(byte[0] & 0x0F)
|
||||
for index in range(num_pixels):
|
||||
if index % 2 == 0:
|
||||
data += first_pixel
|
||||
else:
|
||||
data += second_pixel
|
||||
else:
|
||||
data += byte * num_pixels
|
||||
x += num_pixels
|
||||
else:
|
||||
if byte[0] == 0:
|
||||
# end of line
|
||||
while len(data) % self.state.xsize != 0:
|
||||
data += b"\x00"
|
||||
x = 0
|
||||
elif byte[0] == 1:
|
||||
# end of bitmap
|
||||
break
|
||||
elif byte[0] == 2:
|
||||
# delta
|
||||
bytes_read = self.fd.read(2)
|
||||
if len(bytes_read) < 2:
|
||||
break
|
||||
right, up = self.fd.read(2)
|
||||
data += b"\x00" * (right + up * self.state.xsize)
|
||||
x = len(data) % self.state.xsize
|
||||
else:
|
||||
# absolute mode
|
||||
if rle4:
|
||||
# 2 pixels per byte
|
||||
byte_count = byte[0] // 2
|
||||
bytes_read = self.fd.read(byte_count)
|
||||
for byte_read in bytes_read:
|
||||
data += o8(byte_read >> 4)
|
||||
data += o8(byte_read & 0x0F)
|
||||
else:
|
||||
byte_count = byte[0]
|
||||
bytes_read = self.fd.read(byte_count)
|
||||
data += bytes_read
|
||||
if len(bytes_read) < byte_count:
|
||||
break
|
||||
x += byte[0]
|
||||
|
||||
# align to 16-bit word boundary
|
||||
if self.fd.tell() % 2 != 0:
|
||||
self.fd.seek(1, os.SEEK_CUR)
|
||||
rawmode = "L" if self.mode == "L" else "P"
|
||||
self.set_as_raw(bytes(data), rawmode, (0, self.args[-1]))
|
||||
return -1, 0
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Image plugin for the DIB format (BMP alias)
|
||||
# =============================================================================
|
||||
class DibImageFile(BmpImageFile):
|
||||
format = "DIB"
|
||||
format_description = "Windows Bitmap"
|
||||
|
||||
def _open(self) -> None:
|
||||
self._bitmap()
|
||||
|
||||
|
||||
#
|
||||
# --------------------------------------------------------------------
|
||||
# Write BMP file
|
||||
|
||||
|
||||
SAVE = {
|
||||
"1": ("1", 1, 2),
|
||||
"L": ("L", 8, 256),
|
||||
"P": ("P", 8, 256),
|
||||
"RGB": ("BGR", 24, 0),
|
||||
"RGBA": ("BGRA", 32, 0),
|
||||
}
|
||||
|
||||
|
||||
def _dib_save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
|
||||
_save(im, fp, filename, False)
|
||||
|
||||
|
||||
def _save(
|
||||
im: Image.Image, fp: IO[bytes], filename: str | bytes, bitmap_header: bool = True
|
||||
) -> None:
|
||||
try:
|
||||
rawmode, bits, colors = SAVE[im.mode]
|
||||
except KeyError as e:
|
||||
msg = f"cannot write mode {im.mode} as BMP"
|
||||
raise OSError(msg) from e
|
||||
|
||||
info = im.encoderinfo
|
||||
|
||||
dpi = info.get("dpi", (96, 96))
|
||||
|
||||
# 1 meter == 39.3701 inches
|
||||
ppm = tuple(int(x * 39.3701 + 0.5) for x in dpi)
|
||||
|
||||
stride = ((im.size[0] * bits + 7) // 8 + 3) & (~3)
|
||||
header = 40 # or 64 for OS/2 version 2
|
||||
image = stride * im.size[1]
|
||||
|
||||
if im.mode == "1":
|
||||
palette = b"".join(o8(i) * 3 + b"\x00" for i in (0, 255))
|
||||
elif im.mode == "L":
|
||||
palette = b"".join(o8(i) * 3 + b"\x00" for i in range(256))
|
||||
elif im.mode == "P":
|
||||
palette = im.im.getpalette("RGB", "BGRX")
|
||||
colors = len(palette) // 4
|
||||
else:
|
||||
palette = None
|
||||
|
||||
# bitmap header
|
||||
if bitmap_header:
|
||||
offset = 14 + header + colors * 4
|
||||
file_size = offset + image
|
||||
if file_size > 2**32 - 1:
|
||||
msg = "File size is too large for the BMP format"
|
||||
raise ValueError(msg)
|
||||
fp.write(
|
||||
b"BM" # file type (magic)
|
||||
+ o32(file_size) # file size
|
||||
+ o32(0) # reserved
|
||||
+ o32(offset) # image data offset
|
||||
)
|
||||
|
||||
# bitmap info header
|
||||
fp.write(
|
||||
o32(header) # info header size
|
||||
+ o32(im.size[0]) # width
|
||||
+ o32(im.size[1]) # height
|
||||
+ o16(1) # planes
|
||||
+ o16(bits) # depth
|
||||
+ o32(0) # compression (0=uncompressed)
|
||||
+ o32(image) # size of bitmap
|
||||
+ o32(ppm[0]) # resolution
|
||||
+ o32(ppm[1]) # resolution
|
||||
+ o32(colors) # colors used
|
||||
+ o32(colors) # colors important
|
||||
)
|
||||
|
||||
fp.write(b"\0" * (header - 40)) # padding (for OS/2 format)
|
||||
|
||||
if palette:
|
||||
fp.write(palette)
|
||||
|
||||
ImageFile._save(
|
||||
im, fp, [ImageFile._Tile("raw", (0, 0) + im.size, 0, (rawmode, stride, -1))]
|
||||
)
|
||||
|
||||
|
||||
#
|
||||
# --------------------------------------------------------------------
|
||||
# Registry
|
||||
|
||||
|
||||
Image.register_open(BmpImageFile.format, BmpImageFile, _accept)
|
||||
Image.register_save(BmpImageFile.format, _save)
|
||||
|
||||
Image.register_extension(BmpImageFile.format, ".bmp")
|
||||
|
||||
Image.register_mime(BmpImageFile.format, "image/bmp")
|
||||
|
||||
Image.register_decoder("bmp_rle", BmpRleDecoder)
|
||||
|
||||
Image.register_open(DibImageFile.format, DibImageFile, _dib_accept)
|
||||
Image.register_save(DibImageFile.format, _dib_save)
|
||||
|
||||
Image.register_extension(DibImageFile.format, ".dib")
|
||||
|
||||
Image.register_mime(DibImageFile.format, "image/bmp")
|
||||
76
venv/lib/python3.11/site-packages/PIL/BufrStubImagePlugin.py
Normal file
76
venv/lib/python3.11/site-packages/PIL/BufrStubImagePlugin.py
Normal file
@ -0,0 +1,76 @@
|
||||
#
|
||||
# The Python Imaging Library
|
||||
# $Id$
|
||||
#
|
||||
# BUFR stub adapter
|
||||
#
|
||||
# Copyright (c) 1996-2003 by Fredrik Lundh
|
||||
#
|
||||
# See the README file for information on usage and redistribution.
|
||||
#
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from typing import IO
|
||||
|
||||
from . import Image, ImageFile
|
||||
|
||||
_handler = None
|
||||
|
||||
|
||||
def register_handler(handler: ImageFile.StubHandler | None) -> None:
|
||||
"""
|
||||
Install application-specific BUFR image handler.
|
||||
|
||||
:param handler: Handler object.
|
||||
"""
|
||||
global _handler
|
||||
_handler = handler
|
||||
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# Image adapter
|
||||
|
||||
|
||||
def _accept(prefix: bytes) -> bool:
|
||||
return prefix.startswith((b"BUFR", b"ZCZC"))
|
||||
|
||||
|
||||
class BufrStubImageFile(ImageFile.StubImageFile):
|
||||
format = "BUFR"
|
||||
format_description = "BUFR"
|
||||
|
||||
def _open(self) -> None:
|
||||
assert self.fp is not None
|
||||
if not _accept(self.fp.read(4)):
|
||||
msg = "Not a BUFR file"
|
||||
raise SyntaxError(msg)
|
||||
|
||||
self.fp.seek(-4, os.SEEK_CUR)
|
||||
|
||||
# make something up
|
||||
self._mode = "F"
|
||||
self._size = 1, 1
|
||||
|
||||
loader = self._load()
|
||||
if loader:
|
||||
loader.open(self)
|
||||
|
||||
def _load(self) -> ImageFile.StubHandler | None:
|
||||
return _handler
|
||||
|
||||
|
||||
def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
|
||||
if _handler is None or not hasattr(_handler, "save"):
|
||||
msg = "BUFR save handler not installed"
|
||||
raise OSError(msg)
|
||||
_handler.save(im, fp, filename)
|
||||
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# Registry
|
||||
|
||||
Image.register_open(BufrStubImageFile.format, BufrStubImageFile, _accept)
|
||||
Image.register_save(BufrStubImageFile.format, _save)
|
||||
|
||||
Image.register_extension(BufrStubImageFile.format, ".bufr")
|
||||
173
venv/lib/python3.11/site-packages/PIL/ContainerIO.py
Normal file
173
venv/lib/python3.11/site-packages/PIL/ContainerIO.py
Normal file
@ -0,0 +1,173 @@
|
||||
#
|
||||
# The Python Imaging Library.
|
||||
# $Id$
|
||||
#
|
||||
# a class to read from a container file
|
||||
#
|
||||
# History:
|
||||
# 1995-06-18 fl Created
|
||||
# 1995-09-07 fl Added readline(), readlines()
|
||||
#
|
||||
# Copyright (c) 1997-2001 by Secret Labs AB
|
||||
# Copyright (c) 1995 by Fredrik Lundh
|
||||
#
|
||||
# See the README file for information on usage and redistribution.
|
||||
#
|
||||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
from collections.abc import Iterable
|
||||
from typing import IO, AnyStr, NoReturn
|
||||
|
||||
|
||||
class ContainerIO(IO[AnyStr]):
    """
    A file object that provides read access to a part of an existing
    file (for example a TAR file).

    All offsets exposed by this object are relative to the start of the
    region; seeks and reads are clamped so the wrapped region cannot be
    escaped.
    """

    def __init__(self, file: IO[AnyStr], offset: int, length: int) -> None:
        """
        Create file object.

        :param file: Existing file.
        :param offset: Start of region, in bytes.
        :param length: Size of region, in bytes.
        """
        self.fh: IO[AnyStr] = file  # underlying file handle
        self.pos = 0  # current position, relative to the region start
        self.offset = offset
        self.length = length
        self.fh.seek(offset)

    ##
    # Always false.

    def isatty(self) -> bool:
        return False

    def seekable(self) -> bool:
        return True

    def seek(self, offset: int, mode: int = io.SEEK_SET) -> int:
        """
        Move file pointer.

        :param offset: Offset in bytes.
        :param mode: Starting position. Use 0 for beginning of region, 1
           for current offset, and 2 for end of region. You cannot move
           the pointer outside the defined region.
        :returns: Offset from start of region, in bytes.
        """
        if mode == 1:
            self.pos = self.pos + offset
        elif mode == 2:
            self.pos = self.length + offset
        else:
            self.pos = offset
        # clamp -- the pointer must stay inside [0, length]
        self.pos = max(0, min(self.pos, self.length))
        self.fh.seek(self.offset + self.pos)
        return self.pos

    def tell(self) -> int:
        """
        Get current file pointer.

        :returns: Offset from start of region, in bytes.
        """
        return self.pos

    def readable(self) -> bool:
        return True

    def read(self, n: int = -1) -> AnyStr:
        """
        Read data.

        :param n: Number of bytes to read. If omitted, zero or negative,
            read until end of region.
        :returns: An 8-bit string.
        """
        if n > 0:
            # never read past the end of the region
            n = min(n, self.length - self.pos)
        else:
            n = self.length - self.pos
        if n <= 0:  # EOF
            return b"" if "b" in self.fh.mode else ""  # type: ignore[return-value]
        self.pos = self.pos + n
        return self.fh.read(n)

    def readline(self, n: int = -1) -> AnyStr:
        """
        Read a line of text.

        :param n: Number of bytes to read. If omitted, zero or negative,
            read until end of line.
        :returns: An 8-bit string.
        """
        # Empty string / newline of the right type for binary vs. text mode.
        s: AnyStr = b"" if "b" in self.fh.mode else ""  # type: ignore[assignment]
        newline_character = b"\n" if "b" in self.fh.mode else "\n"
        while True:
            c = self.read(1)
            if not c:
                break
            s = s + c
            if c == newline_character or len(s) == n:
                break
        return s

    def readlines(self, n: int | None = -1) -> list[AnyStr]:
        """
        Read multiple lines of text.

        :param n: Number of lines to read. If omitted, zero, negative or None,
            read until end of region.
        :returns: A list of 8-bit strings.
        """
        lines = []
        while True:
            s = self.readline()
            if not s:
                break
            lines.append(s)
            if len(lines) == n:
                break
        return lines

    def writable(self) -> bool:
        return False

    def write(self, b: AnyStr) -> NoReturn:
        # The region is read-only.
        raise NotImplementedError()

    def writelines(self, lines: Iterable[AnyStr]) -> NoReturn:
        raise NotImplementedError()

    def truncate(self, size: int | None = None) -> int:
        raise NotImplementedError()

    def __enter__(self) -> ContainerIO[AnyStr]:
        return self

    def __exit__(self, *args: object) -> None:
        self.close()

    def __iter__(self) -> ContainerIO[AnyStr]:
        return self

    def __next__(self) -> AnyStr:
        line = self.readline()
        if not line:
            msg = "end of region"
            raise StopIteration(msg)
        return line

    def fileno(self) -> int:
        return self.fh.fileno()

    def flush(self) -> None:
        self.fh.flush()

    def close(self) -> None:
        # NOTE: closes the *underlying* file, not just this view.
        self.fh.close()
|
||||
75
venv/lib/python3.11/site-packages/PIL/CurImagePlugin.py
Normal file
75
venv/lib/python3.11/site-packages/PIL/CurImagePlugin.py
Normal file
@ -0,0 +1,75 @@
|
||||
#
|
||||
# The Python Imaging Library.
|
||||
# $Id$
|
||||
#
|
||||
# Windows Cursor support for PIL
|
||||
#
|
||||
# notes:
|
||||
# uses BmpImagePlugin.py to read the bitmap data.
|
||||
#
|
||||
# history:
|
||||
# 96-05-27 fl Created
|
||||
#
|
||||
# Copyright (c) Secret Labs AB 1997.
|
||||
# Copyright (c) Fredrik Lundh 1996.
|
||||
#
|
||||
# See the README file for information on usage and redistribution.
|
||||
#
|
||||
from __future__ import annotations
|
||||
|
||||
from . import BmpImagePlugin, Image
|
||||
from ._binary import i16le as i16
|
||||
from ._binary import i32le as i32
|
||||
|
||||
#
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
|
||||
def _accept(prefix: bytes) -> bool:
|
||||
return prefix.startswith(b"\0\0\2\0")
|
||||
|
||||
|
||||
##
|
||||
# Image plugin for Windows Cursor files.
|
||||
|
||||
|
||||
class CurImageFile(BmpImagePlugin.BmpImageFile):
    """Image plugin for Windows Cursor (.cur) files.

    A CUR file is a directory of bitmaps; only the largest cursor is
    loaded, reusing the BMP decoder for the pixel data.
    """

    format = "CUR"
    format_description = "Windows Cursor"

    def _open(self) -> None:
        assert self.fp is not None
        offset = self.fp.tell()

        # check magic
        s = self.fp.read(6)
        if not _accept(s):
            msg = "not a CUR file"
            raise SyntaxError(msg)

        # pick the largest cursor in the file
        # i16(s, 4) is the number of 16-byte directory entries; bytes 0/1 of
        # each entry are the cursor width/height.
        m = b""
        for i in range(i16(s, 4)):
            s = self.fp.read(16)
            if not m:
                m = s
            elif s[0] > m[0] and s[1] > m[1]:
                m = s
        if not m:
            msg = "No cursors were found"
            raise TypeError(msg)

        # load as bitmap
        # i32(m, 12) is the entry's image-data offset, relative to file start.
        self._bitmap(i32(m, 12) + offset)

        # patch up the bitmap height: the stored height covers both the
        # XOR (colour) and AND (transparency) masks, so halve it.
        self._size = self.size[0], self.size[1] // 2
        self.tile = [self.tile[0]._replace(extents=(0, 0) + self.size)]
|
||||
|
||||
|
||||
#
# --------------------------------------------------------------------

# Register the plugin with Pillow's open dispatch table (read-only format).
Image.register_open(CurImageFile.format, CurImageFile, _accept)

Image.register_extension(CurImageFile.format, ".cur")
|
||||
84
venv/lib/python3.11/site-packages/PIL/DcxImagePlugin.py
Normal file
84
venv/lib/python3.11/site-packages/PIL/DcxImagePlugin.py
Normal file
@ -0,0 +1,84 @@
|
||||
#
|
||||
# The Python Imaging Library.
|
||||
# $Id$
|
||||
#
|
||||
# DCX file handling
|
||||
#
|
||||
# DCX is a container file format defined by Intel, commonly used
|
||||
# for fax applications. Each DCX file consists of a directory
|
||||
# (a list of file offsets) followed by a set of (usually 1-bit)
|
||||
# PCX files.
|
||||
#
|
||||
# History:
|
||||
# 1995-09-09 fl Created
|
||||
# 1996-03-20 fl Properly derived from PcxImageFile.
|
||||
# 1998-07-15 fl Renamed offset attribute to avoid name clash
|
||||
# 2002-07-30 fl Fixed file handling
|
||||
#
|
||||
# Copyright (c) 1997-98 by Secret Labs AB.
|
||||
# Copyright (c) 1995-96 by Fredrik Lundh.
|
||||
#
|
||||
# See the README file for information on usage and redistribution.
|
||||
#
|
||||
from __future__ import annotations
|
||||
|
||||
from . import Image
|
||||
from ._binary import i32le as i32
|
||||
from ._util import DeferredError
|
||||
from .PcxImagePlugin import PcxImageFile
|
||||
|
||||
MAGIC = 0x3ADE68B1 # QUIZ: what's this value, then?
|
||||
|
||||
|
||||
def _accept(prefix: bytes) -> bool:
    # The first little-endian dword must be the DCX magic number.
    if len(prefix) < 4:
        return False
    return i32(prefix) == MAGIC
|
||||
|
||||
|
||||
##
|
||||
# Image plugin for the Intel DCX format.
|
||||
|
||||
|
||||
class DcxImageFile(PcxImageFile):
    """Image plugin for the Intel DCX format.

    A DCX file is a directory of file offsets followed by a sequence of
    (usually 1-bit) PCX images; frames are exposed through the
    ``seek()``/``tell()`` interface.
    """

    format = "DCX"
    format_description = "Intel DCX"
    # Keep the underlying file open after load() so later frames can be read.
    _close_exclusive_fp_after_loading = False

    def _open(self) -> None:
        # Header
        assert self.fp is not None
        s = self.fp.read(4)
        if not _accept(s):
            msg = "not a DCX file"
            raise SyntaxError(msg)

        # Component directory: up to 1024 32-bit offsets, terminated by a
        # zero entry.
        self._offset = []
        for i in range(1024):
            offset = i32(self.fp.read(4))
            if not offset:
                break
            self._offset.append(offset)

        self._fp = self.fp
        self.frame = -1
        self.n_frames = len(self._offset)
        self.is_animated = self.n_frames > 1
        self.seek(0)

    def seek(self, frame: int) -> None:
        """Select *frame* (0-based); invalid frames raise via _seek_check."""
        if not self._seek_check(frame):
            return
        if isinstance(self._fp, DeferredError):
            # The underlying file was closed; re-raise the original error.
            raise self._fp.ex
        self.frame = frame
        self.fp = self._fp
        self.fp.seek(self._offset[frame])
        # Each component is a plain PCX image; reuse the PCX header parser.
        PcxImageFile._open(self)

    def tell(self) -> int:
        """Return the current frame number."""
        return self.frame
|
||||
|
||||
|
||||
# Register the plugin with Pillow's open dispatch table (read-only format).
Image.register_open(DcxImageFile.format, DcxImageFile, _accept)

Image.register_extension(DcxImageFile.format, ".dcx")
|
||||
625
venv/lib/python3.11/site-packages/PIL/DdsImagePlugin.py
Normal file
625
venv/lib/python3.11/site-packages/PIL/DdsImagePlugin.py
Normal file
@ -0,0 +1,625 @@
|
||||
"""
|
||||
A Pillow plugin for .dds files (S3TC-compressed aka DXTC)
|
||||
Jerome Leclanche <jerome@leclan.ch>
|
||||
|
||||
Documentation:
|
||||
https://web.archive.org/web/20170802060935/http://oss.sgi.com/projects/ogl-sample/registry/EXT/texture_compression_s3tc.txt
|
||||
|
||||
The contents of this file are hereby released in the public domain (CC0)
|
||||
Full text of the CC0 license:
|
||||
https://creativecommons.org/publicdomain/zero/1.0/
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import struct
|
||||
import sys
|
||||
from enum import IntEnum, IntFlag
|
||||
from typing import IO
|
||||
|
||||
from . import Image, ImageFile, ImagePalette
|
||||
from ._binary import i32le as i32
|
||||
from ._binary import o8
|
||||
from ._binary import o32le as o32
|
||||
|
||||
# Magic ("DDS ")
|
||||
DDS_MAGIC = 0x20534444
|
||||
|
||||
|
||||
# DDS flags
|
||||
class DDSD(IntFlag):
    # dwFlags of DDS_HEADER: which header fields contain valid data.
    CAPS = 0x1
    HEIGHT = 0x2
    WIDTH = 0x4
    PITCH = 0x8
    PIXELFORMAT = 0x1000
    MIPMAPCOUNT = 0x20000
    LINEARSIZE = 0x80000
    DEPTH = 0x800000
||||
|
||||
|
||||
# DDS caps
|
||||
class DDSCAPS(IntFlag):
    # dwCaps of DDS_HEADER: surface complexity flags.
    COMPLEX = 0x8
    TEXTURE = 0x1000
    MIPMAP = 0x400000
|
||||
|
||||
|
||||
class DDSCAPS2(IntFlag):
    # dwCaps2 of DDS_HEADER: cubemap face and volume-texture flags.
    CUBEMAP = 0x200
    CUBEMAP_POSITIVEX = 0x400
    CUBEMAP_NEGATIVEX = 0x800
    CUBEMAP_POSITIVEY = 0x1000
    CUBEMAP_NEGATIVEY = 0x2000
    CUBEMAP_POSITIVEZ = 0x4000
    CUBEMAP_NEGATIVEZ = 0x8000
    VOLUME = 0x200000
|
||||
|
||||
|
||||
# Pixel Format
|
||||
class DDPF(IntFlag):
    # dwFlags of DDS_PIXELFORMAT: how pixel data is encoded.
    ALPHAPIXELS = 0x1
    ALPHA = 0x2
    FOURCC = 0x4
    PALETTEINDEXED8 = 0x20
    RGB = 0x40
    LUMINANCE = 0x20000
|
||||
|
||||
|
||||
# dxgiformat.h
|
||||
class DXGI_FORMAT(IntEnum):
|
||||
UNKNOWN = 0
|
||||
R32G32B32A32_TYPELESS = 1
|
||||
R32G32B32A32_FLOAT = 2
|
||||
R32G32B32A32_UINT = 3
|
||||
R32G32B32A32_SINT = 4
|
||||
R32G32B32_TYPELESS = 5
|
||||
R32G32B32_FLOAT = 6
|
||||
R32G32B32_UINT = 7
|
||||
R32G32B32_SINT = 8
|
||||
R16G16B16A16_TYPELESS = 9
|
||||
R16G16B16A16_FLOAT = 10
|
||||
R16G16B16A16_UNORM = 11
|
||||
R16G16B16A16_UINT = 12
|
||||
R16G16B16A16_SNORM = 13
|
||||
R16G16B16A16_SINT = 14
|
||||
R32G32_TYPELESS = 15
|
||||
R32G32_FLOAT = 16
|
||||
R32G32_UINT = 17
|
||||
R32G32_SINT = 18
|
||||
R32G8X24_TYPELESS = 19
|
||||
D32_FLOAT_S8X24_UINT = 20
|
||||
R32_FLOAT_X8X24_TYPELESS = 21
|
||||
X32_TYPELESS_G8X24_UINT = 22
|
||||
R10G10B10A2_TYPELESS = 23
|
||||
R10G10B10A2_UNORM = 24
|
||||
R10G10B10A2_UINT = 25
|
||||
R11G11B10_FLOAT = 26
|
||||
R8G8B8A8_TYPELESS = 27
|
||||
R8G8B8A8_UNORM = 28
|
||||
R8G8B8A8_UNORM_SRGB = 29
|
||||
R8G8B8A8_UINT = 30
|
||||
R8G8B8A8_SNORM = 31
|
||||
R8G8B8A8_SINT = 32
|
||||
R16G16_TYPELESS = 33
|
||||
R16G16_FLOAT = 34
|
||||
R16G16_UNORM = 35
|
||||
R16G16_UINT = 36
|
||||
R16G16_SNORM = 37
|
||||
R16G16_SINT = 38
|
||||
R32_TYPELESS = 39
|
||||
D32_FLOAT = 40
|
||||
R32_FLOAT = 41
|
||||
R32_UINT = 42
|
||||
R32_SINT = 43
|
||||
R24G8_TYPELESS = 44
|
||||
D24_UNORM_S8_UINT = 45
|
||||
R24_UNORM_X8_TYPELESS = 46
|
||||
X24_TYPELESS_G8_UINT = 47
|
||||
R8G8_TYPELESS = 48
|
||||
R8G8_UNORM = 49
|
||||
R8G8_UINT = 50
|
||||
R8G8_SNORM = 51
|
||||
R8G8_SINT = 52
|
||||
R16_TYPELESS = 53
|
||||
R16_FLOAT = 54
|
||||
D16_UNORM = 55
|
||||
R16_UNORM = 56
|
||||
R16_UINT = 57
|
||||
R16_SNORM = 58
|
||||
R16_SINT = 59
|
||||
R8_TYPELESS = 60
|
||||
R8_UNORM = 61
|
||||
R8_UINT = 62
|
||||
R8_SNORM = 63
|
||||
R8_SINT = 64
|
||||
A8_UNORM = 65
|
||||
R1_UNORM = 66
|
||||
R9G9B9E5_SHAREDEXP = 67
|
||||
R8G8_B8G8_UNORM = 68
|
||||
G8R8_G8B8_UNORM = 69
|
||||
BC1_TYPELESS = 70
|
||||
BC1_UNORM = 71
|
||||
BC1_UNORM_SRGB = 72
|
||||
BC2_TYPELESS = 73
|
||||
BC2_UNORM = 74
|
||||
BC2_UNORM_SRGB = 75
|
||||
BC3_TYPELESS = 76
|
||||
BC3_UNORM = 77
|
||||
BC3_UNORM_SRGB = 78
|
||||
BC4_TYPELESS = 79
|
||||
BC4_UNORM = 80
|
||||
BC4_SNORM = 81
|
||||
BC5_TYPELESS = 82
|
||||
BC5_UNORM = 83
|
||||
BC5_SNORM = 84
|
||||
B5G6R5_UNORM = 85
|
||||
B5G5R5A1_UNORM = 86
|
||||
B8G8R8A8_UNORM = 87
|
||||
B8G8R8X8_UNORM = 88
|
||||
R10G10B10_XR_BIAS_A2_UNORM = 89
|
||||
B8G8R8A8_TYPELESS = 90
|
||||
B8G8R8A8_UNORM_SRGB = 91
|
||||
B8G8R8X8_TYPELESS = 92
|
||||
B8G8R8X8_UNORM_SRGB = 93
|
||||
BC6H_TYPELESS = 94
|
||||
BC6H_UF16 = 95
|
||||
BC6H_SF16 = 96
|
||||
BC7_TYPELESS = 97
|
||||
BC7_UNORM = 98
|
||||
BC7_UNORM_SRGB = 99
|
||||
AYUV = 100
|
||||
Y410 = 101
|
||||
Y416 = 102
|
||||
NV12 = 103
|
||||
P010 = 104
|
||||
P016 = 105
|
||||
OPAQUE_420 = 106
|
||||
YUY2 = 107
|
||||
Y210 = 108
|
||||
Y216 = 109
|
||||
NV11 = 110
|
||||
AI44 = 111
|
||||
IA44 = 112
|
||||
P8 = 113
|
||||
A8P8 = 114
|
||||
B4G4R4A4_UNORM = 115
|
||||
P208 = 130
|
||||
V208 = 131
|
||||
V408 = 132
|
||||
SAMPLER_FEEDBACK_MIN_MIP_OPAQUE = 189
|
||||
SAMPLER_FEEDBACK_MIP_REGION_USED_OPAQUE = 190
|
||||
|
||||
|
||||
class D3DFMT(IntEnum):
|
||||
UNKNOWN = 0
|
||||
R8G8B8 = 20
|
||||
A8R8G8B8 = 21
|
||||
X8R8G8B8 = 22
|
||||
R5G6B5 = 23
|
||||
X1R5G5B5 = 24
|
||||
A1R5G5B5 = 25
|
||||
A4R4G4B4 = 26
|
||||
R3G3B2 = 27
|
||||
A8 = 28
|
||||
A8R3G3B2 = 29
|
||||
X4R4G4B4 = 30
|
||||
A2B10G10R10 = 31
|
||||
A8B8G8R8 = 32
|
||||
X8B8G8R8 = 33
|
||||
G16R16 = 34
|
||||
A2R10G10B10 = 35
|
||||
A16B16G16R16 = 36
|
||||
A8P8 = 40
|
||||
P8 = 41
|
||||
L8 = 50
|
||||
A8L8 = 51
|
||||
A4L4 = 52
|
||||
V8U8 = 60
|
||||
L6V5U5 = 61
|
||||
X8L8V8U8 = 62
|
||||
Q8W8V8U8 = 63
|
||||
V16U16 = 64
|
||||
A2W10V10U10 = 67
|
||||
D16_LOCKABLE = 70
|
||||
D32 = 71
|
||||
D15S1 = 73
|
||||
D24S8 = 75
|
||||
D24X8 = 77
|
||||
D24X4S4 = 79
|
||||
D16 = 80
|
||||
D32F_LOCKABLE = 82
|
||||
D24FS8 = 83
|
||||
D32_LOCKABLE = 84
|
||||
S8_LOCKABLE = 85
|
||||
L16 = 81
|
||||
VERTEXDATA = 100
|
||||
INDEX16 = 101
|
||||
INDEX32 = 102
|
||||
Q16W16V16U16 = 110
|
||||
R16F = 111
|
||||
G16R16F = 112
|
||||
A16B16G16R16F = 113
|
||||
R32F = 114
|
||||
G32R32F = 115
|
||||
A32B32G32R32F = 116
|
||||
CxV8U8 = 117
|
||||
A1 = 118
|
||||
A2B10G10R10_XR_BIAS = 119
|
||||
BINARYBUFFER = 199
|
||||
|
||||
UYVY = i32(b"UYVY")
|
||||
R8G8_B8G8 = i32(b"RGBG")
|
||||
YUY2 = i32(b"YUY2")
|
||||
G8R8_G8B8 = i32(b"GRGB")
|
||||
DXT1 = i32(b"DXT1")
|
||||
DXT2 = i32(b"DXT2")
|
||||
DXT3 = i32(b"DXT3")
|
||||
DXT4 = i32(b"DXT4")
|
||||
DXT5 = i32(b"DXT5")
|
||||
DX10 = i32(b"DX10")
|
||||
BC4S = i32(b"BC4S")
|
||||
BC4U = i32(b"BC4U")
|
||||
BC5S = i32(b"BC5S")
|
||||
BC5U = i32(b"BC5U")
|
||||
ATI1 = i32(b"ATI1")
|
||||
ATI2 = i32(b"ATI2")
|
||||
MULTI2_ARGB8 = i32(b"MET1")
|
||||
|
||||
|
||||
# Backward compatibility layer
|
||||
module = sys.modules[__name__]
|
||||
for item in DDSD:
|
||||
assert item.name is not None
|
||||
setattr(module, f"DDSD_{item.name}", item.value)
|
||||
for item1 in DDSCAPS:
|
||||
assert item1.name is not None
|
||||
setattr(module, f"DDSCAPS_{item1.name}", item1.value)
|
||||
for item2 in DDSCAPS2:
|
||||
assert item2.name is not None
|
||||
setattr(module, f"DDSCAPS2_{item2.name}", item2.value)
|
||||
for item3 in DDPF:
|
||||
assert item3.name is not None
|
||||
setattr(module, f"DDPF_{item3.name}", item3.value)
|
||||
|
||||
DDS_FOURCC = DDPF.FOURCC
|
||||
DDS_RGB = DDPF.RGB
|
||||
DDS_RGBA = DDPF.RGB | DDPF.ALPHAPIXELS
|
||||
DDS_LUMINANCE = DDPF.LUMINANCE
|
||||
DDS_LUMINANCEA = DDPF.LUMINANCE | DDPF.ALPHAPIXELS
|
||||
DDS_ALPHA = DDPF.ALPHA
|
||||
DDS_PAL8 = DDPF.PALETTEINDEXED8
|
||||
|
||||
DDS_HEADER_FLAGS_TEXTURE = DDSD.CAPS | DDSD.HEIGHT | DDSD.WIDTH | DDSD.PIXELFORMAT
|
||||
DDS_HEADER_FLAGS_MIPMAP = DDSD.MIPMAPCOUNT
|
||||
DDS_HEADER_FLAGS_VOLUME = DDSD.DEPTH
|
||||
DDS_HEADER_FLAGS_PITCH = DDSD.PITCH
|
||||
DDS_HEADER_FLAGS_LINEARSIZE = DDSD.LINEARSIZE
|
||||
|
||||
DDS_HEIGHT = DDSD.HEIGHT
|
||||
DDS_WIDTH = DDSD.WIDTH
|
||||
|
||||
DDS_SURFACE_FLAGS_TEXTURE = DDSCAPS.TEXTURE
|
||||
DDS_SURFACE_FLAGS_MIPMAP = DDSCAPS.COMPLEX | DDSCAPS.MIPMAP
|
||||
DDS_SURFACE_FLAGS_CUBEMAP = DDSCAPS.COMPLEX
|
||||
|
||||
DDS_CUBEMAP_POSITIVEX = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_POSITIVEX
|
||||
DDS_CUBEMAP_NEGATIVEX = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_NEGATIVEX
|
||||
DDS_CUBEMAP_POSITIVEY = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_POSITIVEY
|
||||
DDS_CUBEMAP_NEGATIVEY = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_NEGATIVEY
|
||||
DDS_CUBEMAP_POSITIVEZ = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_POSITIVEZ
|
||||
DDS_CUBEMAP_NEGATIVEZ = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_NEGATIVEZ
|
||||
|
||||
DXT1_FOURCC = D3DFMT.DXT1
|
||||
DXT3_FOURCC = D3DFMT.DXT3
|
||||
DXT5_FOURCC = D3DFMT.DXT5
|
||||
|
||||
DXGI_FORMAT_R8G8B8A8_TYPELESS = DXGI_FORMAT.R8G8B8A8_TYPELESS
|
||||
DXGI_FORMAT_R8G8B8A8_UNORM = DXGI_FORMAT.R8G8B8A8_UNORM
|
||||
DXGI_FORMAT_R8G8B8A8_UNORM_SRGB = DXGI_FORMAT.R8G8B8A8_UNORM_SRGB
|
||||
DXGI_FORMAT_BC5_TYPELESS = DXGI_FORMAT.BC5_TYPELESS
|
||||
DXGI_FORMAT_BC5_UNORM = DXGI_FORMAT.BC5_UNORM
|
||||
DXGI_FORMAT_BC5_SNORM = DXGI_FORMAT.BC5_SNORM
|
||||
DXGI_FORMAT_BC6H_UF16 = DXGI_FORMAT.BC6H_UF16
|
||||
DXGI_FORMAT_BC6H_SF16 = DXGI_FORMAT.BC6H_SF16
|
||||
DXGI_FORMAT_BC7_TYPELESS = DXGI_FORMAT.BC7_TYPELESS
|
||||
DXGI_FORMAT_BC7_UNORM = DXGI_FORMAT.BC7_UNORM
|
||||
DXGI_FORMAT_BC7_UNORM_SRGB = DXGI_FORMAT.BC7_UNORM_SRGB
|
||||
|
||||
|
||||
class DdsImageFile(ImageFile.ImageFile):
|
||||
format = "DDS"
|
||||
format_description = "DirectDraw Surface"
|
||||
|
||||
def _open(self) -> None:
|
||||
assert self.fp is not None
|
||||
if not _accept(self.fp.read(4)):
|
||||
msg = "not a DDS file"
|
||||
raise SyntaxError(msg)
|
||||
(header_size,) = struct.unpack("<I", self.fp.read(4))
|
||||
if header_size != 124:
|
||||
msg = f"Unsupported header size {repr(header_size)}"
|
||||
raise OSError(msg)
|
||||
header = self.fp.read(header_size - 4)
|
||||
if len(header) != 120:
|
||||
msg = f"Incomplete header: {len(header)} bytes"
|
||||
raise OSError(msg)
|
||||
|
||||
flags, height, width = struct.unpack("<3I", header[:12])
|
||||
self._size = (width, height)
|
||||
extents = (0, 0) + self.size
|
||||
|
||||
pitch, depth, mipmaps = struct.unpack("<3I", header[12:24])
|
||||
struct.unpack("<11I", header[24:68]) # reserved
|
||||
|
||||
# pixel format
|
||||
pfsize, pfflags, fourcc, bitcount = struct.unpack("<4I", header[68:84])
|
||||
n = 0
|
||||
rawmode = None
|
||||
if pfflags & DDPF.RGB:
|
||||
# Texture contains uncompressed RGB data
|
||||
if pfflags & DDPF.ALPHAPIXELS:
|
||||
self._mode = "RGBA"
|
||||
mask_count = 4
|
||||
else:
|
||||
self._mode = "RGB"
|
||||
mask_count = 3
|
||||
|
||||
masks = struct.unpack(f"<{mask_count}I", header[84 : 84 + mask_count * 4])
|
||||
self.tile = [ImageFile._Tile("dds_rgb", extents, 0, (bitcount, masks))]
|
||||
return
|
||||
elif pfflags & DDPF.LUMINANCE:
|
||||
if bitcount == 8:
|
||||
self._mode = "L"
|
||||
elif bitcount == 16 and pfflags & DDPF.ALPHAPIXELS:
|
||||
self._mode = "LA"
|
||||
else:
|
||||
msg = f"Unsupported bitcount {bitcount} for {pfflags}"
|
||||
raise OSError(msg)
|
||||
elif pfflags & DDPF.PALETTEINDEXED8:
|
||||
self._mode = "P"
|
||||
self.palette = ImagePalette.raw("RGBA", self.fp.read(1024))
|
||||
self.palette.mode = "RGBA"
|
||||
elif pfflags & DDPF.FOURCC:
|
||||
offset = header_size + 4
|
||||
if fourcc == D3DFMT.DXT1:
|
||||
self._mode = "RGBA"
|
||||
self.pixel_format = "DXT1"
|
||||
n = 1
|
||||
elif fourcc == D3DFMT.DXT3:
|
||||
self._mode = "RGBA"
|
||||
self.pixel_format = "DXT3"
|
||||
n = 2
|
||||
elif fourcc == D3DFMT.DXT5:
|
||||
self._mode = "RGBA"
|
||||
self.pixel_format = "DXT5"
|
||||
n = 3
|
||||
elif fourcc in (D3DFMT.BC4U, D3DFMT.ATI1):
|
||||
self._mode = "L"
|
||||
self.pixel_format = "BC4"
|
||||
n = 4
|
||||
elif fourcc == D3DFMT.BC5S:
|
||||
self._mode = "RGB"
|
||||
self.pixel_format = "BC5S"
|
||||
n = 5
|
||||
elif fourcc in (D3DFMT.BC5U, D3DFMT.ATI2):
|
||||
self._mode = "RGB"
|
||||
self.pixel_format = "BC5"
|
||||
n = 5
|
||||
elif fourcc == D3DFMT.DX10:
|
||||
offset += 20
|
||||
# ignoring flags which pertain to volume textures and cubemaps
|
||||
(dxgi_format,) = struct.unpack("<I", self.fp.read(4))
|
||||
self.fp.read(16)
|
||||
if dxgi_format in (
|
||||
DXGI_FORMAT.BC1_UNORM,
|
||||
DXGI_FORMAT.BC1_TYPELESS,
|
||||
):
|
||||
self._mode = "RGBA"
|
||||
self.pixel_format = "BC1"
|
||||
n = 1
|
||||
elif dxgi_format in (DXGI_FORMAT.BC2_TYPELESS, DXGI_FORMAT.BC2_UNORM):
|
||||
self._mode = "RGBA"
|
||||
self.pixel_format = "BC2"
|
||||
n = 2
|
||||
elif dxgi_format in (DXGI_FORMAT.BC3_TYPELESS, DXGI_FORMAT.BC3_UNORM):
|
||||
self._mode = "RGBA"
|
||||
self.pixel_format = "BC3"
|
||||
n = 3
|
||||
elif dxgi_format in (DXGI_FORMAT.BC4_TYPELESS, DXGI_FORMAT.BC4_UNORM):
|
||||
self._mode = "L"
|
||||
self.pixel_format = "BC4"
|
||||
n = 4
|
||||
elif dxgi_format in (DXGI_FORMAT.BC5_TYPELESS, DXGI_FORMAT.BC5_UNORM):
|
||||
self._mode = "RGB"
|
||||
self.pixel_format = "BC5"
|
||||
n = 5
|
||||
elif dxgi_format == DXGI_FORMAT.BC5_SNORM:
|
||||
self._mode = "RGB"
|
||||
self.pixel_format = "BC5S"
|
||||
n = 5
|
||||
elif dxgi_format == DXGI_FORMAT.BC6H_UF16:
|
||||
self._mode = "RGB"
|
||||
self.pixel_format = "BC6H"
|
||||
n = 6
|
||||
elif dxgi_format == DXGI_FORMAT.BC6H_SF16:
|
||||
self._mode = "RGB"
|
||||
self.pixel_format = "BC6HS"
|
||||
n = 6
|
||||
elif dxgi_format in (
|
||||
DXGI_FORMAT.BC7_TYPELESS,
|
||||
DXGI_FORMAT.BC7_UNORM,
|
||||
DXGI_FORMAT.BC7_UNORM_SRGB,
|
||||
):
|
||||
self._mode = "RGBA"
|
||||
self.pixel_format = "BC7"
|
||||
n = 7
|
||||
if dxgi_format == DXGI_FORMAT.BC7_UNORM_SRGB:
|
||||
self.info["gamma"] = 1 / 2.2
|
||||
elif dxgi_format in (
|
||||
DXGI_FORMAT.R8G8B8A8_TYPELESS,
|
||||
DXGI_FORMAT.R8G8B8A8_UNORM,
|
||||
DXGI_FORMAT.R8G8B8A8_UNORM_SRGB,
|
||||
):
|
||||
self._mode = "RGBA"
|
||||
if dxgi_format == DXGI_FORMAT.R8G8B8A8_UNORM_SRGB:
|
||||
self.info["gamma"] = 1 / 2.2
|
||||
else:
|
||||
msg = f"Unimplemented DXGI format {dxgi_format}"
|
||||
raise NotImplementedError(msg)
|
||||
else:
|
||||
msg = f"Unimplemented pixel format {repr(fourcc)}"
|
||||
raise NotImplementedError(msg)
|
||||
else:
|
||||
msg = f"Unknown pixel format flags {pfflags}"
|
||||
raise NotImplementedError(msg)
|
||||
|
||||
if n:
|
||||
self.tile = [
|
||||
ImageFile._Tile("bcn", extents, offset, (n, self.pixel_format))
|
||||
]
|
||||
else:
|
||||
self.tile = [ImageFile._Tile("raw", extents, 0, rawmode or self.mode)]
|
||||
|
||||
def load_seek(self, pos: int) -> None:
|
||||
pass
|
||||
|
||||
|
||||
class DdsRgbDecoder(ImageFile.PyDecoder):
    """Decoder for uncompressed DDS RGB(A) data with arbitrary channel
    bitmasks, as described by the header's dwRGBABitMask fields."""

    _pulls_fd = True

    def decode(self, buffer: bytes | Image.SupportsArrayInterface) -> tuple[int, int]:
        assert self.fd is not None
        # args = (bits per pixel, per-channel bitmasks) set up in _open().
        bitcount, masks = self.args

        # Some masks will be padded with zeros, e.g. R 0b11 G 0b1100
        # Calculate how many zeros each mask is padded with
        mask_offsets = []
        # And the maximum value of each channel without the padding
        mask_totals = []
        for mask in masks:
            offset = 0
            if mask != 0:
                # Count trailing zero bits of the mask.
                while mask >> (offset + 1) << (offset + 1) == mask:
                    offset += 1
            mask_offsets.append(offset)
            mask_totals.append(mask >> offset)

        data = bytearray()
        bytecount = bitcount // 8
        # One output byte per channel per pixel.
        dest_length = self.state.xsize * self.state.ysize * len(masks)
        while len(data) < dest_length:
            value = int.from_bytes(self.fd.read(bytecount), "little")
            for i, mask in enumerate(masks):
                masked_value = value & mask
                # Remove the zero padding, and scale it to 8 bits
                data += o8(
                    int(((masked_value >> mask_offsets[i]) / mask_totals[i]) * 255)
                    if mask_totals[i]
                    else 0
                )
        self.set_as_raw(data)
        # (-1, 0): all data consumed, no error.
        return -1, 0
|
||||
|
||||
|
||||
def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
|
||||
if im.mode not in ("RGB", "RGBA", "L", "LA"):
|
||||
msg = f"cannot write mode {im.mode} as DDS"
|
||||
raise OSError(msg)
|
||||
|
||||
flags = DDSD.CAPS | DDSD.HEIGHT | DDSD.WIDTH | DDSD.PIXELFORMAT
|
||||
bitcount = len(im.getbands()) * 8
|
||||
pixel_format = im.encoderinfo.get("pixel_format")
|
||||
args: tuple[int] | str
|
||||
if pixel_format:
|
||||
codec_name = "bcn"
|
||||
flags |= DDSD.LINEARSIZE
|
||||
pitch = (im.width + 3) * 4
|
||||
rgba_mask = [0, 0, 0, 0]
|
||||
pixel_flags = DDPF.FOURCC
|
||||
if pixel_format == "DXT1":
|
||||
fourcc = D3DFMT.DXT1
|
||||
args = (1,)
|
||||
elif pixel_format == "DXT3":
|
||||
fourcc = D3DFMT.DXT3
|
||||
args = (2,)
|
||||
elif pixel_format == "DXT5":
|
||||
fourcc = D3DFMT.DXT5
|
||||
args = (3,)
|
||||
else:
|
||||
fourcc = D3DFMT.DX10
|
||||
if pixel_format == "BC2":
|
||||
args = (2,)
|
||||
dxgi_format = DXGI_FORMAT.BC2_TYPELESS
|
||||
elif pixel_format == "BC3":
|
||||
args = (3,)
|
||||
dxgi_format = DXGI_FORMAT.BC3_TYPELESS
|
||||
elif pixel_format == "BC5":
|
||||
args = (5,)
|
||||
dxgi_format = DXGI_FORMAT.BC5_TYPELESS
|
||||
if im.mode != "RGB":
|
||||
msg = "only RGB mode can be written as BC5"
|
||||
raise OSError(msg)
|
||||
else:
|
||||
msg = f"cannot write pixel format {pixel_format}"
|
||||
raise OSError(msg)
|
||||
else:
|
||||
codec_name = "raw"
|
||||
flags |= DDSD.PITCH
|
||||
pitch = (im.width * bitcount + 7) // 8
|
||||
|
||||
alpha = im.mode[-1] == "A"
|
||||
if im.mode[0] == "L":
|
||||
pixel_flags = DDPF.LUMINANCE
|
||||
args = im.mode
|
||||
if alpha:
|
||||
rgba_mask = [0x000000FF, 0x000000FF, 0x000000FF]
|
||||
else:
|
||||
rgba_mask = [0xFF000000, 0xFF000000, 0xFF000000]
|
||||
else:
|
||||
pixel_flags = DDPF.RGB
|
||||
args = im.mode[::-1]
|
||||
rgba_mask = [0x00FF0000, 0x0000FF00, 0x000000FF]
|
||||
|
||||
if alpha:
|
||||
r, g, b, a = im.split()
|
||||
im = Image.merge("RGBA", (a, r, g, b))
|
||||
if alpha:
|
||||
pixel_flags |= DDPF.ALPHAPIXELS
|
||||
rgba_mask.append(0xFF000000 if alpha else 0)
|
||||
|
||||
fourcc = D3DFMT.UNKNOWN
|
||||
fp.write(
|
||||
o32(DDS_MAGIC)
|
||||
+ struct.pack(
|
||||
"<7I",
|
||||
124, # header size
|
||||
flags, # flags
|
||||
im.height,
|
||||
im.width,
|
||||
pitch,
|
||||
0, # depth
|
||||
0, # mipmaps
|
||||
)
|
||||
+ struct.pack("11I", *((0,) * 11)) # reserved
|
||||
# pfsize, pfflags, fourcc, bitcount
|
||||
+ struct.pack("<4I", 32, pixel_flags, fourcc, bitcount)
|
||||
+ struct.pack("<4I", *rgba_mask) # dwRGBABitMask
|
||||
+ struct.pack("<5I", DDSCAPS.TEXTURE, 0, 0, 0, 0)
|
||||
)
|
||||
if fourcc == D3DFMT.DX10:
|
||||
fp.write(
|
||||
# dxgi_format, 2D resource, misc, array size, straight alpha
|
||||
struct.pack("<5I", dxgi_format, 3, 0, 0, 1)
|
||||
)
|
||||
ImageFile._save(im, fp, [ImageFile._Tile(codec_name, (0, 0) + im.size, 0, args)])
|
||||
|
||||
|
||||
def _accept(prefix: bytes) -> bool:
|
||||
return prefix.startswith(b"DDS ")
|
||||
|
||||
|
||||
# Register the plugin (read and write) and its helper decoder with Pillow.
Image.register_open(DdsImageFile.format, DdsImageFile, _accept)
Image.register_decoder("dds_rgb", DdsRgbDecoder)
Image.register_save(DdsImageFile.format, _save)
Image.register_extension(DdsImageFile.format, ".dds")
|
||||
481
venv/lib/python3.11/site-packages/PIL/EpsImagePlugin.py
Normal file
481
venv/lib/python3.11/site-packages/PIL/EpsImagePlugin.py
Normal file
@ -0,0 +1,481 @@
|
||||
#
|
||||
# The Python Imaging Library.
|
||||
# $Id$
|
||||
#
|
||||
# EPS file handling
|
||||
#
|
||||
# History:
|
||||
# 1995-09-01 fl Created (0.1)
|
||||
# 1996-05-18 fl Don't choke on "atend" fields, Ghostscript interface (0.2)
|
||||
# 1996-08-22 fl Don't choke on floating point BoundingBox values
|
||||
# 1996-08-23 fl Handle files from Macintosh (0.3)
|
||||
# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4)
|
||||
# 2003-09-07 fl Check gs.close status (from Federico Di Gregorio) (0.5)
|
||||
# 2014-05-07 e Handling of EPS with binary preview and fixed resolution
|
||||
# resizing
|
||||
#
|
||||
# Copyright (c) 1997-2003 by Secret Labs AB.
|
||||
# Copyright (c) 1995-2003 by Fredrik Lundh
|
||||
#
|
||||
# See the README file for information on usage and redistribution.
|
||||
#
|
||||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
from typing import IO
|
||||
|
||||
from . import Image, ImageFile
|
||||
from ._binary import i32le as i32
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
|
||||
split = re.compile(r"^%%([^:]*):[ \t]*(.*)[ \t]*$")
|
||||
field = re.compile(r"^%[%!\w]([^:]*)[ \t]*$")
|
||||
|
||||
gs_binary: str | bool | None = None
|
||||
gs_windows_binary = None
|
||||
|
||||
|
||||
def has_ghostscript() -> bool:
    """Return True if a Ghostscript executable can be located.

    The probe result is cached in the module-level ``gs_binary`` (and, on
    Windows, ``gs_windows_binary``): ``None`` means "not probed yet",
    ``False`` means "probed and absent", and a string is the binary name.
    """
    global gs_binary, gs_windows_binary
    if gs_binary is None:
        if sys.platform.startswith("win"):
            if gs_windows_binary is None:
                import shutil

                # Try the common Windows console binary names in order.
                for binary in ("gswin32c", "gswin64c", "gs"):
                    if shutil.which(binary) is not None:
                        gs_windows_binary = binary
                        break
                else:
                    gs_windows_binary = False
            gs_binary = gs_windows_binary
        else:
            try:
                subprocess.check_call(["gs", "--version"], stdout=subprocess.DEVNULL)
                gs_binary = "gs"
            except OSError:
                # Executable missing or not runnable.
                gs_binary = False
    return gs_binary is not False
|
||||
|
||||
|
||||
def Ghostscript(
    tile: list[ImageFile._Tile],
    size: tuple[int, int],
    fp: IO[bytes],
    scale: int = 1,
    transparency: bool = False,
) -> Image.core.ImagingCore:
    """Render an image using Ghostscript.

    :param tile: single-entry tile list whose args carry (length, bbox).
    :param size: target size in points (pixels at 72 dpi).
    :param fp: open file object positioned anywhere; read from the start.
    :param scale: integer supersampling factor for hi-res rendering.
    :param transparency: render to RGBA (pngalpha) instead of pnmraw.
    :raises OSError: if no Ghostscript executable can be located.
    """
    global gs_binary
    if not has_ghostscript():
        msg = "Unable to locate Ghostscript on paths"
        raise OSError(msg)
    assert isinstance(gs_binary, str)

    # Unpack decoder tile
    args = tile[0].args
    assert isinstance(args, tuple)
    length, bbox = args

    # Hack to support hi-res rendering
    scale = int(scale) or 1
    width = size[0] * scale
    height = size[1] * scale
    # resolution is dependent on bbox and size
    res_x = 72.0 * width / (bbox[2] - bbox[0])
    res_y = 72.0 * height / (bbox[3] - bbox[1])

    # Output file for Ghostscript; closed immediately so gs can write to it.
    out_fd, outfile = tempfile.mkstemp()
    os.close(out_fd)

    infile_temp = None
    if hasattr(fp, "name") and os.path.exists(fp.name):
        # fp is backed by a real file: let Ghostscript read it directly.
        infile = fp.name
    else:
        in_fd, infile_temp = tempfile.mkstemp()
        os.close(in_fd)
        infile = infile_temp

        # Ignore length and offset!
        # Ghostscript can read it
        # Copy whole file to read in Ghostscript
        with open(infile_temp, "wb") as f:
            # fetch length of fp
            fp.seek(0, io.SEEK_END)
            fsize = fp.tell()
            # ensure start position
            # go back
            fp.seek(0)
            lengthfile = fsize
            # copy in bounded chunks to avoid loading the file into memory
            while lengthfile > 0:
                s = fp.read(min(lengthfile, 100 * 1024))
                if not s:
                    break
                lengthfile -= len(s)
                f.write(s)

    if transparency:
        # "RGBA"
        device = "pngalpha"
    else:
        # "pnmraw" automatically chooses between
        # PBM ("1"), PGM ("L"), and PPM ("RGB").
        device = "pnmraw"

    # Build Ghostscript command
    command = [
        gs_binary,
        "-q",  # quiet mode
        f"-g{width:d}x{height:d}",  # set output geometry (pixels)
        f"-r{res_x:f}x{res_y:f}",  # set input DPI (dots per inch)
        "-dBATCH",  # exit after processing
        "-dNOPAUSE",  # don't pause between pages
        "-dSAFER",  # safe mode
        f"-sDEVICE={device}",
        f"-sOutputFile={outfile}",  # output file
        # adjust for image origin
        "-c",
        f"{-bbox[0]} {-bbox[1]} translate",
        "-f",
        infile,  # input file
        # showpage (see https://bugs.ghostscript.com/show_bug.cgi?id=698272)
        "-c",
        "showpage",
    ]

    # push data through Ghostscript
    try:
        startupinfo = None
        if sys.platform.startswith("win"):
            # keep the gs console window hidden on Windows
            startupinfo = subprocess.STARTUPINFO()
            startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        subprocess.check_call(command, startupinfo=startupinfo)
        with Image.open(outfile) as out_im:
            out_im.load()
            # return a copy so the data survives closing the temp image
            return out_im.im.copy()
    finally:
        # best-effort cleanup of the temporary files
        try:
            os.unlink(outfile)
            if infile_temp:
                os.unlink(infile_temp)
        except OSError:
            pass
|
||||
|
||||
|
||||
def _accept(prefix: bytes) -> bool:
|
||||
return prefix.startswith(b"%!PS") or (
|
||||
len(prefix) >= 4 and i32(prefix) == 0xC6D3D0C5
|
||||
)
|
||||
|
||||
|
||||
##
|
||||
# Image plugin for Encapsulated PostScript. This plugin supports only
|
||||
# a few variants of this format.
|
||||
|
||||
|
||||
class EpsImageFile(ImageFile.ImageFile):
    """EPS File Parser for the Python Imaging Library"""

    format = "EPS"
    format_description = "Encapsulated Postscript"

    # "ImageData" mode ids -> PIL mode names (used for 8-bit data)
    mode_map = {1: "L", 2: "LAB", 3: "RGB", 4: "CMYK"}

    def _open(self) -> None:
        """Parse DSC header/trailer comments to determine mode and size.

        Reads the file byte-by-byte into a 255-byte line buffer, handling
        both \\r and \\n line endings, binary sections, an optional Photoshop
        "%ImageData" descriptor, and "(atend)" bounding boxes in the trailer.
        """
        assert self.fp is not None
        (length, offset) = self._find_offset(self.fp)

        # go to offset - start of "%!PS"
        self.fp.seek(offset)

        self._mode = "RGB"

        # When reading header comments, the first comment is used.
        # When reading trailer comments, the last comment is used.
        bounding_box: list[int] | None = None
        imagedata_size: tuple[int, int] | None = None

        # fixed-size line buffer; DSC limits comment lines to 255 chars
        byte_arr = bytearray(255)
        bytes_mv = memoryview(byte_arr)
        bytes_read = 0
        reading_header_comments = True
        reading_trailer_comments = False
        trailer_reached = False

        def check_required_header_comments() -> None:
            """
            The EPS specification requires that some headers exist.
            This should be checked when the header comments formally end,
            when image data starts, or when the file ends, whichever comes first.
            """
            if "PS-Adobe" not in self.info:
                msg = 'EPS header missing "%!PS-Adobe" comment'
                raise SyntaxError(msg)
            if "BoundingBox" not in self.info:
                msg = 'EPS header missing "%%BoundingBox" comment'
                raise SyntaxError(msg)

        def read_comment(s: str) -> bool:
            """Parse one "%%Key: value" comment into self.info.

            Returns False when the line is not of that form.  Tracks the
            bounding box, deferring to the trailer when it is "(atend)".
            """
            nonlocal bounding_box, reading_trailer_comments
            try:
                m = split.match(s)
            except re.error as e:
                msg = "not an EPS file"
                raise SyntaxError(msg) from e

            if not m:
                return False

            k, v = m.group(1, 2)
            self.info[k] = v
            if k == "BoundingBox":
                if v == "(atend)":
                    reading_trailer_comments = True
                elif not bounding_box or (trailer_reached and reading_trailer_comments):
                    try:
                        # Note: The DSC spec says that BoundingBox
                        # fields should be integers, but some drivers
                        # put floating point values there anyway.
                        bounding_box = [int(float(i)) for i in v.split()]
                    except Exception:
                        pass
            return True

        while True:
            byte = self.fp.read(1)
            if byte == b"":
                # if we didn't read a byte we must be at the end of the file
                if bytes_read == 0:
                    if reading_header_comments:
                        check_required_header_comments()
                    break
            elif byte in b"\r\n":
                # if we read a line ending character, ignore it and parse what
                # we have already read. if we haven't read any other characters,
                # continue reading
                if bytes_read == 0:
                    continue
            else:
                # ASCII/hexadecimal lines in an EPS file must not exceed
                # 255 characters, not including line ending characters
                if bytes_read >= 255:
                    # only enforce this for lines starting with a "%",
                    # otherwise assume it's binary data
                    if byte_arr[0] == ord("%"):
                        msg = "not an EPS file"
                        raise SyntaxError(msg)
                    else:
                        if reading_header_comments:
                            check_required_header_comments()
                            reading_header_comments = False
                        # reset bytes_read so we can keep reading
                        # data until the end of the line
                        bytes_read = 0
                byte_arr[bytes_read] = byte[0]
                bytes_read += 1
                continue

            if reading_header_comments:
                # Load EPS header

                # if this line doesn't start with a "%",
                # or does start with "%%EndComments",
                # then we've reached the end of the header/comments
                if byte_arr[0] != ord("%") or bytes_mv[:13] == b"%%EndComments":
                    check_required_header_comments()
                    reading_header_comments = False
                    continue

                s = str(bytes_mv[:bytes_read], "latin-1")
                if not read_comment(s):
                    m = field.match(s)
                    if m:
                        k = m.group(1)
                        if k.startswith("PS-Adobe"):
                            # e.g. "%!PS-Adobe-3.0" -> info["PS-Adobe"] = "3.0"
                            self.info["PS-Adobe"] = k[9:]
                        else:
                            self.info[k] = ""
                    elif s[0] == "%":
                        # handle non-DSC PostScript comments that some
                        # tools mistakenly put in the Comments section
                        pass
                    else:
                        msg = "bad EPS header"
                        raise OSError(msg)
            elif bytes_mv[:11] == b"%ImageData:":
                # Check for an "ImageData" descriptor
                # https://www.adobe.com/devnet-apps/photoshop/fileformatashtml/#50577413_pgfId-1035096

                # If we've already read an "ImageData" descriptor,
                # don't read another one.
                if imagedata_size:
                    bytes_read = 0
                    continue

                # Values:
                # columns
                # rows
                # bit depth (1 or 8)
                # mode (1: L, 2: LAB, 3: RGB, 4: CMYK)
                # number of padding channels
                # block size (number of bytes per row per channel)
                # binary/ascii (1: binary, 2: ascii)
                # data start identifier (the image data follows after a single line
                #   consisting only of this quoted value)
                image_data_values = byte_arr[11:bytes_read].split(None, 7)
                columns, rows, bit_depth, mode_id = (
                    int(value) for value in image_data_values[:4]
                )

                if bit_depth == 1:
                    self._mode = "1"
                elif bit_depth == 8:
                    try:
                        self._mode = self.mode_map[mode_id]
                    # NOTE(review): a dict subscript raises KeyError, not
                    # ValueError, so an unknown mode_id propagates KeyError
                    # instead of breaking out of the loop here — likely
                    # intended to be `except KeyError`; confirm upstream.
                    except ValueError:
                        break
                else:
                    break

                # Parse the columns and rows after checking the bit depth and mode
                # in case the bit depth and/or mode are invalid.
                imagedata_size = columns, rows
            elif bytes_mv[:5] == b"%%EOF":
                break
            elif trailer_reached and reading_trailer_comments:
                # Load EPS trailer
                s = str(bytes_mv[:bytes_read], "latin-1")
                read_comment(s)
            elif bytes_mv[:9] == b"%%Trailer":
                trailer_reached = True
            elif bytes_mv[:14] == b"%%BeginBinary:":
                # skip over an embedded binary section of the declared length
                bytecount = int(byte_arr[14:bytes_read])
                self.fp.seek(bytecount, os.SEEK_CUR)
            bytes_read = 0

        # A "BoundingBox" is always required,
        # even if an "ImageData" descriptor size exists.
        if not bounding_box:
            msg = "cannot determine EPS bounding box"
            raise OSError(msg)

        # An "ImageData" size takes precedence over the "BoundingBox".
        self._size = imagedata_size or (
            bounding_box[2] - bounding_box[0],
            bounding_box[3] - bounding_box[1],
        )

        self.tile = [
            ImageFile._Tile("eps", (0, 0) + self.size, offset, (length, bounding_box))
        ]

    def _find_offset(self, fp: IO[bytes]) -> tuple[int, int]:
        """Return (length, offset) of the PostScript section within *fp*.

        Handles both plain "%!PS" files and DOS-EPS files whose 30-byte
        binary header stores the PostScript offset and length.
        :raises SyntaxError: if *fp* starts with neither signature.
        """
        s = fp.read(4)

        if s == b"%!PS":
            # for HEAD without binary preview
            fp.seek(0, io.SEEK_END)
            length = fp.tell()
            offset = 0
        elif i32(s) == 0xC6D3D0C5:
            # FIX for: Some EPS file not handled correctly / issue #302
            # EPS can contain binary data
            # or start directly with latin coding
            # more info see:
            # https://web.archive.org/web/20160528181353/http://partners.adobe.com/public/developer/en/ps/5002.EPSF_Spec.pdf
            s = fp.read(8)
            offset = i32(s)
            length = i32(s, 4)
        else:
            msg = "not an EPS file"
            raise SyntaxError(msg)

        return length, offset

    def load(
        self, scale: int = 1, transparency: bool = False
    ) -> Image.core.PixelAccess | None:
        """Rasterize via Ghostscript, then delegate to Image.Image.load."""
        # Load EPS via Ghostscript
        if self.tile:
            assert self.fp is not None
            self.im = Ghostscript(self.tile, self.size, self.fp, scale, transparency)
            # the rendered raster defines the final mode and size
            self._mode = self.im.mode
            self._size = self.im.size
            self.tile = []
        return Image.Image.load(self)

    def load_seek(self, pos: int) -> None:
        # we can't incrementally load, so force ImageFile.parser to
        # use our custom load method by defining this method.
        pass
|
||||
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
|
||||
def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes, eps: int = 1) -> None:
    """EPS Writer for the Python Imaging Library.

    Writes *im* to *fp* as hex-encoded (Encapsulated) PostScript.  Only
    "L", "RGB" and "CMYK" modes are supported; when *eps* is true (the
    default) a DSC comment header is emitted before the image dictionary.
    :raises ValueError: for unsupported image modes.
    """

    # make sure image data is available
    im.load()

    # determine PostScript image mode: (bits, bands, image operator)
    if im.mode == "L":
        operator = (8, 1, b"image")
    elif im.mode == "RGB":
        operator = (8, 3, b"false 3 colorimage")
    elif im.mode == "CMYK":
        operator = (8, 4, b"false 4 colorimage")
    else:
        msg = "image mode is not supported"
        raise ValueError(msg)

    if eps:
        # write EPS header
        fp.write(b"%!PS-Adobe-3.0 EPSF-3.0\n")
        fp.write(b"%%Creator: PIL 0.1 EpsEncode\n")
        # fp.write("%%CreationDate: %s"...)
        # "%%%%" collapses to a literal "%%" under bytes %-formatting
        fp.write(b"%%%%BoundingBox: 0 0 %d %d\n" % im.size)
        fp.write(b"%%Pages: 1\n")
        fp.write(b"%%EndComments\n")
        fp.write(b"%%Page: 1 1\n")
        fp.write(b"%%ImageData: %d %d " % im.size)
        fp.write(b'%d %d 0 1 1 "%s"\n' % operator)

    # image header
    fp.write(b"gsave\n")
    fp.write(b"10 dict begin\n")
    # one scanline of one band, in bytes
    fp.write(b"/buf %d string def\n" % (im.size[0] * operator[1]))
    fp.write(b"%d %d scale\n" % im.size)
    fp.write(b"%d %d 8\n" % im.size)  # <= bits
    # flip vertically: PostScript's origin is bottom-left
    fp.write(b"[%d 0 0 -%d 0 %d]\n" % (im.size[0], im.size[1], im.size[1]))
    fp.write(b"{ currentfile buf readhexstring pop } bind\n")
    fp.write(operator[2] + b"\n")
    if hasattr(fp, "flush"):
        fp.flush()

    # emit the pixel data through the "eps" (hex) encoder
    ImageFile._save(im, fp, [ImageFile._Tile("eps", (0, 0) + im.size)])

    fp.write(b"\n%%%%EndBinary\n")
    fp.write(b"grestore end\n")
    if hasattr(fp, "flush"):
        fp.flush()
|
||||
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
|
||||
# Register the EPS plugin with PIL's format machinery.
Image.register_open(EpsImageFile.format, EpsImageFile, _accept)

Image.register_save(EpsImageFile.format, _save)

Image.register_extensions(EpsImageFile.format, [".ps", ".eps"])

Image.register_mime(EpsImageFile.format, "application/postscript")
|
||||
382
venv/lib/python3.11/site-packages/PIL/ExifTags.py
Normal file
382
venv/lib/python3.11/site-packages/PIL/ExifTags.py
Normal file
@ -0,0 +1,382 @@
|
||||
#
|
||||
# The Python Imaging Library.
|
||||
# $Id$
|
||||
#
|
||||
# EXIF tags
|
||||
#
|
||||
# Copyright (c) 2003 by Secret Labs AB
|
||||
#
|
||||
# See the README file for information on usage and redistribution.
|
||||
#
|
||||
|
||||
"""
|
||||
This module provides constants and clear-text names for various
|
||||
well-known EXIF tags.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import IntEnum
|
||||
|
||||
|
||||
class Base(IntEnum):
    """Well-known EXIF/TIFF tag ids, named after their standard tag names."""

    # possibly incomplete
    InteropIndex = 0x0001
    ProcessingSoftware = 0x000B
    NewSubfileType = 0x00FE
    SubfileType = 0x00FF
    ImageWidth = 0x0100
    ImageLength = 0x0101
    BitsPerSample = 0x0102
    Compression = 0x0103
    PhotometricInterpretation = 0x0106
    Thresholding = 0x0107
    CellWidth = 0x0108
    CellLength = 0x0109
    FillOrder = 0x010A
    DocumentName = 0x010D
    ImageDescription = 0x010E
    Make = 0x010F
    Model = 0x0110
    StripOffsets = 0x0111
    Orientation = 0x0112
    SamplesPerPixel = 0x0115
    RowsPerStrip = 0x0116
    StripByteCounts = 0x0117
    MinSampleValue = 0x0118
    MaxSampleValue = 0x0119
    XResolution = 0x011A
    YResolution = 0x011B
    PlanarConfiguration = 0x011C
    PageName = 0x011D
    FreeOffsets = 0x0120
    FreeByteCounts = 0x0121
    GrayResponseUnit = 0x0122
    GrayResponseCurve = 0x0123
    T4Options = 0x0124
    T6Options = 0x0125
    ResolutionUnit = 0x0128
    PageNumber = 0x0129
    TransferFunction = 0x012D
    Software = 0x0131
    DateTime = 0x0132
    Artist = 0x013B
    HostComputer = 0x013C
    Predictor = 0x013D
    WhitePoint = 0x013E
    PrimaryChromaticities = 0x013F
    ColorMap = 0x0140
    HalftoneHints = 0x0141
    TileWidth = 0x0142
    TileLength = 0x0143
    TileOffsets = 0x0144
    TileByteCounts = 0x0145
    SubIFDs = 0x014A
    InkSet = 0x014C
    InkNames = 0x014D
    NumberOfInks = 0x014E
    DotRange = 0x0150
    TargetPrinter = 0x0151
    ExtraSamples = 0x0152
    SampleFormat = 0x0153
    SMinSampleValue = 0x0154
    SMaxSampleValue = 0x0155
    TransferRange = 0x0156
    ClipPath = 0x0157
    XClipPathUnits = 0x0158
    YClipPathUnits = 0x0159
    Indexed = 0x015A
    JPEGTables = 0x015B
    OPIProxy = 0x015F
    JPEGProc = 0x0200
    JpegIFOffset = 0x0201
    JpegIFByteCount = 0x0202
    JpegRestartInterval = 0x0203
    JpegLosslessPredictors = 0x0205
    JpegPointTransforms = 0x0206
    JpegQTables = 0x0207
    JpegDCTables = 0x0208
    JpegACTables = 0x0209
    YCbCrCoefficients = 0x0211
    YCbCrSubSampling = 0x0212
    YCbCrPositioning = 0x0213
    ReferenceBlackWhite = 0x0214
    XMLPacket = 0x02BC
    RelatedImageFileFormat = 0x1000
    RelatedImageWidth = 0x1001
    RelatedImageLength = 0x1002
    Rating = 0x4746
    RatingPercent = 0x4749
    ImageID = 0x800D
    CFARepeatPatternDim = 0x828D
    BatteryLevel = 0x828F
    Copyright = 0x8298
    ExposureTime = 0x829A
    FNumber = 0x829D
    IPTCNAA = 0x83BB
    ImageResources = 0x8649
    ExifOffset = 0x8769
    InterColorProfile = 0x8773
    ExposureProgram = 0x8822
    SpectralSensitivity = 0x8824
    GPSInfo = 0x8825
    ISOSpeedRatings = 0x8827
    OECF = 0x8828
    Interlace = 0x8829
    TimeZoneOffset = 0x882A
    SelfTimerMode = 0x882B
    SensitivityType = 0x8830
    StandardOutputSensitivity = 0x8831
    RecommendedExposureIndex = 0x8832
    ISOSpeed = 0x8833
    ISOSpeedLatitudeyyy = 0x8834
    ISOSpeedLatitudezzz = 0x8835
    ExifVersion = 0x9000
    DateTimeOriginal = 0x9003
    DateTimeDigitized = 0x9004
    OffsetTime = 0x9010
    OffsetTimeOriginal = 0x9011
    OffsetTimeDigitized = 0x9012
    ComponentsConfiguration = 0x9101
    CompressedBitsPerPixel = 0x9102
    ShutterSpeedValue = 0x9201
    ApertureValue = 0x9202
    BrightnessValue = 0x9203
    ExposureBiasValue = 0x9204
    MaxApertureValue = 0x9205
    SubjectDistance = 0x9206
    MeteringMode = 0x9207
    LightSource = 0x9208
    Flash = 0x9209
    FocalLength = 0x920A
    Noise = 0x920D
    ImageNumber = 0x9211
    SecurityClassification = 0x9212
    ImageHistory = 0x9213
    TIFFEPStandardID = 0x9216
    MakerNote = 0x927C
    UserComment = 0x9286
    SubsecTime = 0x9290
    SubsecTimeOriginal = 0x9291
    SubsecTimeDigitized = 0x9292
    AmbientTemperature = 0x9400
    Humidity = 0x9401
    Pressure = 0x9402
    WaterDepth = 0x9403
    Acceleration = 0x9404
    CameraElevationAngle = 0x9405
    XPTitle = 0x9C9B
    XPComment = 0x9C9C
    XPAuthor = 0x9C9D
    XPKeywords = 0x9C9E
    XPSubject = 0x9C9F
    FlashPixVersion = 0xA000
    ColorSpace = 0xA001
    ExifImageWidth = 0xA002
    ExifImageHeight = 0xA003
    RelatedSoundFile = 0xA004
    ExifInteroperabilityOffset = 0xA005
    FlashEnergy = 0xA20B
    SpatialFrequencyResponse = 0xA20C
    FocalPlaneXResolution = 0xA20E
    FocalPlaneYResolution = 0xA20F
    FocalPlaneResolutionUnit = 0xA210
    SubjectLocation = 0xA214
    ExposureIndex = 0xA215
    SensingMethod = 0xA217
    FileSource = 0xA300
    SceneType = 0xA301
    CFAPattern = 0xA302
    CustomRendered = 0xA401
    ExposureMode = 0xA402
    WhiteBalance = 0xA403
    DigitalZoomRatio = 0xA404
    FocalLengthIn35mmFilm = 0xA405
    SceneCaptureType = 0xA406
    GainControl = 0xA407
    Contrast = 0xA408
    Saturation = 0xA409
    Sharpness = 0xA40A
    DeviceSettingDescription = 0xA40B
    SubjectDistanceRange = 0xA40C
    ImageUniqueID = 0xA420
    CameraOwnerName = 0xA430
    BodySerialNumber = 0xA431
    LensSpecification = 0xA432
    LensMake = 0xA433
    LensModel = 0xA434
    LensSerialNumber = 0xA435
    CompositeImage = 0xA460
    CompositeImageCount = 0xA461
    CompositeImageExposureTimes = 0xA462
    Gamma = 0xA500
    PrintImageMatching = 0xC4A5
    DNGVersion = 0xC612
    DNGBackwardVersion = 0xC613
    UniqueCameraModel = 0xC614
    LocalizedCameraModel = 0xC615
    CFAPlaneColor = 0xC616
    CFALayout = 0xC617
    LinearizationTable = 0xC618
    BlackLevelRepeatDim = 0xC619
    BlackLevel = 0xC61A
    BlackLevelDeltaH = 0xC61B
    BlackLevelDeltaV = 0xC61C
    WhiteLevel = 0xC61D
    DefaultScale = 0xC61E
    DefaultCropOrigin = 0xC61F
    DefaultCropSize = 0xC620
    ColorMatrix1 = 0xC621
    ColorMatrix2 = 0xC622
    CameraCalibration1 = 0xC623
    CameraCalibration2 = 0xC624
    ReductionMatrix1 = 0xC625
    ReductionMatrix2 = 0xC626
    AnalogBalance = 0xC627
    AsShotNeutral = 0xC628
    AsShotWhiteXY = 0xC629
    BaselineExposure = 0xC62A
    BaselineNoise = 0xC62B
    BaselineSharpness = 0xC62C
    BayerGreenSplit = 0xC62D
    LinearResponseLimit = 0xC62E
    CameraSerialNumber = 0xC62F
    LensInfo = 0xC630
    ChromaBlurRadius = 0xC631
    AntiAliasStrength = 0xC632
    ShadowScale = 0xC633
    DNGPrivateData = 0xC634
    MakerNoteSafety = 0xC635
    CalibrationIlluminant1 = 0xC65A
    CalibrationIlluminant2 = 0xC65B
    BestQualityScale = 0xC65C
    RawDataUniqueID = 0xC65D
    OriginalRawFileName = 0xC68B
    OriginalRawFileData = 0xC68C
    ActiveArea = 0xC68D
    MaskedAreas = 0xC68E
    AsShotICCProfile = 0xC68F
    AsShotPreProfileMatrix = 0xC690
    CurrentICCProfile = 0xC691
    CurrentPreProfileMatrix = 0xC692
    ColorimetricReference = 0xC6BF
    CameraCalibrationSignature = 0xC6F3
    ProfileCalibrationSignature = 0xC6F4
    AsShotProfileName = 0xC6F6
    NoiseReductionApplied = 0xC6F7
    ProfileName = 0xC6F8
    ProfileHueSatMapDims = 0xC6F9
    ProfileHueSatMapData1 = 0xC6FA
    ProfileHueSatMapData2 = 0xC6FB
    ProfileToneCurve = 0xC6FC
    ProfileEmbedPolicy = 0xC6FD
    ProfileCopyright = 0xC6FE
    ForwardMatrix1 = 0xC714
    ForwardMatrix2 = 0xC715
    PreviewApplicationName = 0xC716
    PreviewApplicationVersion = 0xC717
    PreviewSettingsName = 0xC718
    PreviewSettingsDigest = 0xC719
    PreviewColorSpace = 0xC71A
    PreviewDateTime = 0xC71B
    RawImageDigest = 0xC71C
    OriginalRawFileDigest = 0xC71D
    SubTileBlockSize = 0xC71E
    RowInterleaveFactor = 0xC71F
    ProfileLookTableDims = 0xC725
    ProfileLookTableData = 0xC726
    OpcodeList1 = 0xC740
    OpcodeList2 = 0xC741
    OpcodeList3 = 0xC74E
    NoiseProfile = 0xC761
|
||||
|
||||
|
||||
"""Maps EXIF tags to tag names."""
TAGS = {
    **{i.value: i.name for i in Base},
    # Extra ids that share names with Base members but live at different
    # tag ids; the 0x9216 entry overrides the Base-derived name.
    0x920C: "SpatialFrequencyResponse",
    0x9214: "SubjectLocation",
    0x9215: "ExposureIndex",
    0x828E: "CFAPattern",
    0x920B: "FlashEnergy",
    0x9216: "TIFF/EPStandardID",
}
|
||||
|
||||
|
||||
class GPS(IntEnum):
    """Tag ids of the EXIF GPS IFD."""

    GPSVersionID = 0x00
    GPSLatitudeRef = 0x01
    GPSLatitude = 0x02
    GPSLongitudeRef = 0x03
    GPSLongitude = 0x04
    GPSAltitudeRef = 0x05
    GPSAltitude = 0x06
    GPSTimeStamp = 0x07
    GPSSatellites = 0x08
    GPSStatus = 0x09
    GPSMeasureMode = 0x0A
    GPSDOP = 0x0B
    GPSSpeedRef = 0x0C
    GPSSpeed = 0x0D
    GPSTrackRef = 0x0E
    GPSTrack = 0x0F
    GPSImgDirectionRef = 0x10
    GPSImgDirection = 0x11
    GPSMapDatum = 0x12
    GPSDestLatitudeRef = 0x13
    GPSDestLatitude = 0x14
    GPSDestLongitudeRef = 0x15
    GPSDestLongitude = 0x16
    GPSDestBearingRef = 0x17
    GPSDestBearing = 0x18
    GPSDestDistanceRef = 0x19
    GPSDestDistance = 0x1A
    GPSProcessingMethod = 0x1B
    GPSAreaInformation = 0x1C
    GPSDateStamp = 0x1D
    GPSDifferential = 0x1E
    GPSHPositioningError = 0x1F
|
||||
|
||||
|
||||
"""Maps EXIF GPS tags to tag names."""
# Derived mechanically from the GPS enum above.
GPSTAGS = {i.value: i.name for i in GPS}
|
||||
|
||||
|
||||
class Interop(IntEnum):
    """Tag ids of the EXIF Interoperability IFD."""

    InteropIndex = 0x0001
    InteropVersion = 0x0002
    RelatedImageFileFormat = 0x1000
    RelatedImageWidth = 0x1001
    RelatedImageHeight = 0x1002
|
||||
|
||||
|
||||
class IFD(IntEnum):
    """Tag ids of pointers to sub-IFDs (plus the synthetic IFD1 marker)."""

    Exif = 0x8769
    GPSInfo = 0x8825
    MakerNote = 0x927C
    Makernote = 0x927C  # Deprecated alias of MakerNote
    Interop = 0xA005
    IFD1 = -1
|
||||
|
||||
|
||||
class LightSource(IntEnum):
    """Values of the EXIF LightSource (0x9208) tag."""

    Unknown = 0x00
    Daylight = 0x01
    Fluorescent = 0x02
    Tungsten = 0x03
    Flash = 0x04
    Fine = 0x09
    Cloudy = 0x0A
    Shade = 0x0B
    DaylightFluorescent = 0x0C
    DayWhiteFluorescent = 0x0D
    CoolWhiteFluorescent = 0x0E
    WhiteFluorescent = 0x0F
    StandardLightA = 0x11
    StandardLightB = 0x12
    StandardLightC = 0x13
    D55 = 0x14
    D65 = 0x15
    D75 = 0x16
    D50 = 0x17
    ISO = 0x18
    Other = 0xFF
|
||||
152
venv/lib/python3.11/site-packages/PIL/FitsImagePlugin.py
Normal file
152
venv/lib/python3.11/site-packages/PIL/FitsImagePlugin.py
Normal file
@ -0,0 +1,152 @@
|
||||
#
|
||||
# The Python Imaging Library
|
||||
# $Id$
|
||||
#
|
||||
# FITS file handling
|
||||
#
|
||||
# Copyright (c) 1998-2003 by Fredrik Lundh
|
||||
#
|
||||
# See the README file for information on usage and redistribution.
|
||||
#
|
||||
from __future__ import annotations
|
||||
|
||||
import gzip
|
||||
import math
|
||||
|
||||
from . import Image, ImageFile
|
||||
|
||||
|
||||
def _accept(prefix: bytes) -> bool:
|
||||
return prefix.startswith(b"SIMPLE")
|
||||
|
||||
|
||||
class FitsImageFile(ImageFile.ImageFile):
    format = "FITS"
    format_description = "FITS"

    def _open(self) -> None:
        """Read 80-byte FITS header records until the first image data unit.

        Header units are padded to 2880-byte blocks; after each "END"
        record the file position is rounded up to the next block boundary.
        :raises OSError: on a truncated file.
        :raises SyntaxError: if the first record is not a valid "SIMPLE = T".
        :raises ValueError: if no header described any image data.
        """
        assert self.fp is not None

        headers: dict[bytes, bytes] = {}
        header_in_progress = False
        decoder_name = ""
        while True:
            header = self.fp.read(80)
            if not header:
                msg = "Truncated FITS file"
                raise OSError(msg)
            keyword = header[:8].strip()
            if keyword in (b"SIMPLE", b"XTENSION"):
                header_in_progress = True
            elif headers and not header_in_progress:
                # This is now a data unit
                break
            elif keyword == b"END":
                # Seek to the end of the header unit
                self.fp.seek(math.ceil(self.fp.tell() / 2880) * 2880)
                if not decoder_name:
                    decoder_name, offset, args = self._parse_headers(headers)

                header_in_progress = False
                continue

            if decoder_name:
                # Keep going to read past the headers
                continue

            # record value: text after column 8, up to any "/" comment
            value = header[8:].split(b"/")[0].strip()
            if value.startswith(b"="):
                value = value[1:].strip()
            if not headers and (not _accept(keyword) or value != b"T"):
                msg = "Not a FITS file"
                raise SyntaxError(msg)
            headers[keyword] = value

        if not decoder_name:
            msg = "No image data"
            raise ValueError(msg)

        # rewind over the record that triggered the break above
        offset += self.fp.tell() - 80
        self.tile = [ImageFile._Tile(decoder_name, (0, 0) + self.size, offset, args)]

    def _get_size(
        self, headers: dict[bytes, bytes], prefix: bytes
    ) -> tuple[int, int] | None:
        """Return (width, height) from NAXIS records, or None if NAXIS is 0.

        *prefix* selects plain ("NAXIS1") or tile-compressed ("ZNAXIS1") keys.
        """
        naxis = int(headers[prefix + b"NAXIS"])
        if naxis == 0:
            return None

        if naxis == 1:
            # one-dimensional data: treat as a 1-pixel-wide column
            return 1, int(headers[prefix + b"NAXIS1"])
        else:
            return int(headers[prefix + b"NAXIS1"]), int(headers[prefix + b"NAXIS2"])

    def _parse_headers(
        self, headers: dict[bytes, bytes]
    ) -> tuple[str, int, tuple[str | int, ...]]:
        """Derive (decoder_name, data offset, decoder args) from *headers*.

        Sets self._size and self._mode as a side effect.  Returns an empty
        decoder name when the header unit carries no image data.
        """
        prefix = b""
        decoder_name = "raw"
        offset = 0
        if (
            headers.get(b"XTENSION") == b"'BINTABLE'"
            and headers.get(b"ZIMAGE") == b"T"
            and headers[b"ZCMPTYPE"] == b"'GZIP_1 '"
        ):
            # GZIP_1 tile-compressed image inside a binary table extension;
            # the compressed stream starts after the table's own data.
            no_prefix_size = self._get_size(headers, prefix) or (0, 0)
            number_of_bits = int(headers[b"BITPIX"])
            offset = no_prefix_size[0] * no_prefix_size[1] * (number_of_bits // 8)

            prefix = b"Z"
            decoder_name = "fits_gzip"

        size = self._get_size(headers, prefix)
        if not size:
            return "", 0, ()

        self._size = size

        # map BITPIX to a PIL mode; negative values are IEEE floats
        number_of_bits = int(headers[prefix + b"BITPIX"])
        if number_of_bits == 8:
            self._mode = "L"
        elif number_of_bits == 16:
            self._mode = "I;16"
        elif number_of_bits == 32:
            self._mode = "I"
        elif number_of_bits in (-32, -64):
            self._mode = "F"

        args: tuple[str | int, ...]
        if decoder_name == "raw":
            args = (self.mode, 0, -1)
        else:
            args = (number_of_bits,)
        return decoder_name, offset, args
|
||||
|
||||
|
||||
class FitsGzipDecoder(ImageFile.PyDecoder):
    """Decoder for GZIP_1 tile-compressed FITS image data."""

    _pulls_fd = True

    def decode(self, buffer: bytes | Image.SupportsArrayInterface) -> tuple[int, int]:
        """Decompress the data unit and hand the pixel bytes to the core.

        Each stored sample occupies 4 bytes; only the trailing
        ``bits // 8`` bytes (capped at 4) of each group are kept.  Rows are
        emitted in reverse order before loading.  Returns (-1, 0) to signal
        that the whole tile was consumed.
        """
        assert self.fd is not None
        value = gzip.decompress(self.fd.read())

        rows = []
        offset = 0
        number_of_bits = min(self.args[0] // 8, 4)
        for y in range(self.state.ysize):
            row = bytearray()
            for x in range(self.state.xsize):
                row += value[offset + (4 - number_of_bits) : offset + 4]
                offset += 4
            rows.append(row)
        # Join the reversed rows directly instead of flattening through a
        # per-pixel Python int list: same bytes, far fewer allocations.
        self.set_as_raw(b"".join(reversed(rows)))
        return -1, 0
|
||||
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# Registry
|
||||
|
||||
# Register the FITS plugin and its gzip tile decoder with PIL.
Image.register_open(FitsImageFile.format, FitsImageFile, _accept)
Image.register_decoder("fits_gzip", FitsGzipDecoder)

Image.register_extensions(FitsImageFile.format, [".fit", ".fits"])
|
||||
184
venv/lib/python3.11/site-packages/PIL/FliImagePlugin.py
Normal file
184
venv/lib/python3.11/site-packages/PIL/FliImagePlugin.py
Normal file
@ -0,0 +1,184 @@
|
||||
#
|
||||
# The Python Imaging Library.
|
||||
# $Id$
|
||||
#
|
||||
# FLI/FLC file handling.
|
||||
#
|
||||
# History:
|
||||
# 95-09-01 fl Created
|
||||
# 97-01-03 fl Fixed parser, setup decoder tile
|
||||
# 98-07-15 fl Renamed offset attribute to avoid name clash
|
||||
#
|
||||
# Copyright (c) Secret Labs AB 1997-98.
|
||||
# Copyright (c) Fredrik Lundh 1995-97.
|
||||
#
|
||||
# See the README file for information on usage and redistribution.
|
||||
#
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
|
||||
from . import Image, ImageFile, ImagePalette
|
||||
from ._binary import i16le as i16
|
||||
from ._binary import i32le as i32
|
||||
from ._binary import o8
|
||||
from ._util import DeferredError
|
||||
|
||||
#
|
||||
# decoder
|
||||
|
||||
|
||||
def _accept(prefix: bytes) -> bool:
|
||||
return (
|
||||
len(prefix) >= 16
|
||||
and i16(prefix, 4) in [0xAF11, 0xAF12]
|
||||
and i16(prefix, 14) in [0, 3] # flags
|
||||
)
|
||||
|
||||
|
||||
##
|
||||
# Image plugin for the FLI/FLC animation format. Use the <b>seek</b>
|
||||
# method to load individual frames.
|
||||
|
||||
|
||||
class FliImageFile(ImageFile.ImageFile):
|
||||
format = "FLI"
|
||||
format_description = "Autodesk FLI/FLC Animation"
|
||||
_close_exclusive_fp_after_loading = False
|
||||
|
||||
def _open(self) -> None:
|
||||
# HEAD
|
||||
assert self.fp is not None
|
||||
s = self.fp.read(128)
|
||||
if not (
|
||||
_accept(s)
|
||||
and s[20:22] == b"\x00" * 2
|
||||
and s[42:80] == b"\x00" * 38
|
||||
and s[88:] == b"\x00" * 40
|
||||
):
|
||||
msg = "not an FLI/FLC file"
|
||||
raise SyntaxError(msg)
|
||||
|
||||
# frames
|
||||
self.n_frames = i16(s, 6)
|
||||
self.is_animated = self.n_frames > 1
|
||||
|
||||
# image characteristics
|
||||
self._mode = "P"
|
||||
self._size = i16(s, 8), i16(s, 10)
|
||||
|
||||
# animation speed
|
||||
duration = i32(s, 16)
|
||||
magic = i16(s, 4)
|
||||
if magic == 0xAF11:
|
||||
duration = (duration * 1000) // 70
|
||||
self.info["duration"] = duration
|
||||
|
||||
# look for palette
|
||||
palette = [(a, a, a) for a in range(256)]
|
||||
|
||||
s = self.fp.read(16)
|
||||
|
||||
self.__offset = 128
|
||||
|
||||
if i16(s, 4) == 0xF100:
|
||||
# prefix chunk; ignore it
|
||||
self.fp.seek(self.__offset + i32(s))
|
||||
s = self.fp.read(16)
|
||||
|
||||
if i16(s, 4) == 0xF1FA:
|
||||
# look for palette chunk
|
||||
number_of_subchunks = i16(s, 6)
|
||||
chunk_size: int | None = None
|
||||
for _ in range(number_of_subchunks):
|
||||
if chunk_size is not None:
|
||||
self.fp.seek(chunk_size - 6, os.SEEK_CUR)
|
||||
s = self.fp.read(6)
|
||||
chunk_type = i16(s, 4)
|
||||
if chunk_type in (4, 11):
|
||||
self._palette(palette, 2 if chunk_type == 11 else 0)
|
||||
break
|
||||
chunk_size = i32(s)
|
||||
if not chunk_size:
|
||||
break
|
||||
|
||||
self.palette = ImagePalette.raw(
|
||||
"RGB", b"".join(o8(r) + o8(g) + o8(b) for (r, g, b) in palette)
|
||||
)
|
||||
|
||||
# set things up to decode first frame
|
||||
self.__frame = -1
|
||||
self._fp = self.fp
|
||||
self.__rewind = self.fp.tell()
|
||||
self.seek(0)
|
||||
|
||||
def _palette(self, palette: list[tuple[int, int, int]], shift: int) -> None:
|
||||
# load palette
|
||||
|
||||
i = 0
|
||||
assert self.fp is not None
|
||||
for e in range(i16(self.fp.read(2))):
|
||||
s = self.fp.read(2)
|
||||
i = i + s[0]
|
||||
n = s[1]
|
||||
if n == 0:
|
||||
n = 256
|
||||
s = self.fp.read(n * 3)
|
||||
for n in range(0, len(s), 3):
|
||||
r = s[n] << shift
|
||||
g = s[n + 1] << shift
|
||||
b = s[n + 2] << shift
|
||||
palette[i] = (r, g, b)
|
||||
i += 1
|
||||
|
||||
def seek(self, frame: int) -> None:
|
||||
if not self._seek_check(frame):
|
||||
return
|
||||
if frame < self.__frame:
|
||||
self._seek(0)
|
||||
|
||||
for f in range(self.__frame + 1, frame + 1):
|
||||
self._seek(f)
|
||||
|
||||
def _seek(self, frame: int) -> None:
|
||||
if isinstance(self._fp, DeferredError):
|
||||
raise self._fp.ex
|
||||
if frame == 0:
|
||||
self.__frame = -1
|
||||
self._fp.seek(self.__rewind)
|
||||
self.__offset = 128
|
||||
else:
|
||||
# ensure that the previous frame was loaded
|
||||
self.load()
|
||||
|
||||
if frame != self.__frame + 1:
|
||||
msg = f"cannot seek to frame {frame}"
|
||||
raise ValueError(msg)
|
||||
self.__frame = frame
|
||||
|
||||
# move to next frame
|
||||
self.fp = self._fp
|
||||
self.fp.seek(self.__offset)
|
||||
|
||||
s = self.fp.read(4)
|
||||
if not s:
|
||||
msg = "missing frame size"
|
||||
raise EOFError(msg)
|
||||
|
||||
framesize = i32(s)
|
||||
|
||||
self.decodermaxblock = framesize
|
||||
self.tile = [ImageFile._Tile("fli", (0, 0) + self.size, self.__offset)]
|
||||
|
||||
self.__offset += framesize
|
||||
|
||||
def tell(self) -> int:
|
||||
return self.__frame
|
||||
|
||||
|
||||
#
|
||||
# registry
|
||||
|
||||
Image.register_open(FliImageFile.format, FliImageFile, _accept)
|
||||
|
||||
Image.register_extensions(FliImageFile.format, [".fli", ".flc"])
|
||||
134
venv/lib/python3.11/site-packages/PIL/FontFile.py
Normal file
134
venv/lib/python3.11/site-packages/PIL/FontFile.py
Normal file
@ -0,0 +1,134 @@
|
||||
#
|
||||
# The Python Imaging Library
|
||||
# $Id$
|
||||
#
|
||||
# base class for raster font file parsers
|
||||
#
|
||||
# history:
|
||||
# 1997-06-05 fl created
|
||||
# 1997-08-19 fl restrict image width
|
||||
#
|
||||
# Copyright (c) 1997-1998 by Secret Labs AB
|
||||
# Copyright (c) 1997-1998 by Fredrik Lundh
|
||||
#
|
||||
# See the README file for information on usage and redistribution.
|
||||
#
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from typing import BinaryIO
|
||||
|
||||
from . import Image, _binary
|
||||
|
||||
WIDTH = 800
|
||||
|
||||
|
||||
def puti16(
|
||||
fp: BinaryIO, values: tuple[int, int, int, int, int, int, int, int, int, int]
|
||||
) -> None:
|
||||
"""Write network order (big-endian) 16-bit sequence"""
|
||||
for v in values:
|
||||
if v < 0:
|
||||
v += 65536
|
||||
fp.write(_binary.o16be(v))
|
||||
|
||||
|
||||
class FontFile:
|
||||
"""Base class for raster font file handlers."""
|
||||
|
||||
bitmap: Image.Image | None = None
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.info: dict[bytes, bytes | int] = {}
|
||||
self.glyph: list[
|
||||
tuple[
|
||||
tuple[int, int],
|
||||
tuple[int, int, int, int],
|
||||
tuple[int, int, int, int],
|
||||
Image.Image,
|
||||
]
|
||||
| None
|
||||
] = [None] * 256
|
||||
|
||||
def __getitem__(self, ix: int) -> (
|
||||
tuple[
|
||||
tuple[int, int],
|
||||
tuple[int, int, int, int],
|
||||
tuple[int, int, int, int],
|
||||
Image.Image,
|
||||
]
|
||||
| None
|
||||
):
|
||||
return self.glyph[ix]
|
||||
|
||||
def compile(self) -> None:
|
||||
"""Create metrics and bitmap"""
|
||||
|
||||
if self.bitmap:
|
||||
return
|
||||
|
||||
# create bitmap large enough to hold all data
|
||||
h = w = maxwidth = 0
|
||||
lines = 1
|
||||
for glyph in self.glyph:
|
||||
if glyph:
|
||||
d, dst, src, im = glyph
|
||||
h = max(h, src[3] - src[1])
|
||||
w = w + (src[2] - src[0])
|
||||
if w > WIDTH:
|
||||
lines += 1
|
||||
w = src[2] - src[0]
|
||||
maxwidth = max(maxwidth, w)
|
||||
|
||||
xsize = maxwidth
|
||||
ysize = lines * h
|
||||
|
||||
if xsize == 0 and ysize == 0:
|
||||
return
|
||||
|
||||
self.ysize = h
|
||||
|
||||
# paste glyphs into bitmap
|
||||
self.bitmap = Image.new("1", (xsize, ysize))
|
||||
self.metrics: list[
|
||||
tuple[tuple[int, int], tuple[int, int, int, int], tuple[int, int, int, int]]
|
||||
| None
|
||||
] = [None] * 256
|
||||
x = y = 0
|
||||
for i in range(256):
|
||||
glyph = self[i]
|
||||
if glyph:
|
||||
d, dst, src, im = glyph
|
||||
xx = src[2] - src[0]
|
||||
x0, y0 = x, y
|
||||
x = x + xx
|
||||
if x > WIDTH:
|
||||
x, y = 0, y + h
|
||||
x0, y0 = x, y
|
||||
x = xx
|
||||
s = src[0] + x0, src[1] + y0, src[2] + x0, src[3] + y0
|
||||
self.bitmap.paste(im.crop(src), s)
|
||||
self.metrics[i] = d, dst, s
|
||||
|
||||
def save(self, filename: str) -> None:
|
||||
"""Save font"""
|
||||
|
||||
self.compile()
|
||||
|
||||
# font data
|
||||
if not self.bitmap:
|
||||
msg = "No bitmap created"
|
||||
raise ValueError(msg)
|
||||
self.bitmap.save(os.path.splitext(filename)[0] + ".pbm", "PNG")
|
||||
|
||||
# font metrics
|
||||
with open(os.path.splitext(filename)[0] + ".pil", "wb") as fp:
|
||||
fp.write(b"PILfont\n")
|
||||
fp.write(f";;;;;;{self.ysize};\n".encode("ascii")) # HACK!!!
|
||||
fp.write(b"DATA\n")
|
||||
for id in range(256):
|
||||
m = self.metrics[id]
|
||||
if not m:
|
||||
puti16(fp, (0,) * 10)
|
||||
else:
|
||||
puti16(fp, m[0] + m[1] + m[2])
|
||||
259
venv/lib/python3.11/site-packages/PIL/FpxImagePlugin.py
Normal file
259
venv/lib/python3.11/site-packages/PIL/FpxImagePlugin.py
Normal file
@ -0,0 +1,259 @@
|
||||
#
|
||||
# THIS IS WORK IN PROGRESS
|
||||
#
|
||||
# The Python Imaging Library.
|
||||
# $Id$
|
||||
#
|
||||
# FlashPix support for PIL
|
||||
#
|
||||
# History:
|
||||
# 97-01-25 fl Created (reads uncompressed RGB images only)
|
||||
#
|
||||
# Copyright (c) Secret Labs AB 1997.
|
||||
# Copyright (c) Fredrik Lundh 1997.
|
||||
#
|
||||
# See the README file for information on usage and redistribution.
|
||||
#
|
||||
from __future__ import annotations
|
||||
|
||||
import olefile
|
||||
|
||||
from . import Image, ImageFile
|
||||
from ._binary import i32le as i32
|
||||
|
||||
# we map from colour field tuples to (mode, rawmode) descriptors
|
||||
MODES = {
|
||||
# opacity
|
||||
(0x00007FFE,): ("A", "L"),
|
||||
# monochrome
|
||||
(0x00010000,): ("L", "L"),
|
||||
(0x00018000, 0x00017FFE): ("RGBA", "LA"),
|
||||
# photo YCC
|
||||
(0x00020000, 0x00020001, 0x00020002): ("RGB", "YCC;P"),
|
||||
(0x00028000, 0x00028001, 0x00028002, 0x00027FFE): ("RGBA", "YCCA;P"),
|
||||
# standard RGB (NIFRGB)
|
||||
(0x00030000, 0x00030001, 0x00030002): ("RGB", "RGB"),
|
||||
(0x00038000, 0x00038001, 0x00038002, 0x00037FFE): ("RGBA", "RGBA"),
|
||||
}
|
||||
|
||||
|
||||
#
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
|
||||
def _accept(prefix: bytes) -> bool:
|
||||
return prefix.startswith(olefile.MAGIC)
|
||||
|
||||
|
||||
##
|
||||
# Image plugin for the FlashPix images.
|
||||
|
||||
|
||||
class FpxImageFile(ImageFile.ImageFile):
|
||||
format = "FPX"
|
||||
format_description = "FlashPix"
|
||||
|
||||
def _open(self) -> None:
|
||||
#
|
||||
# read the OLE directory and see if this is a likely
|
||||
# to be a FlashPix file
|
||||
|
||||
assert self.fp is not None
|
||||
try:
|
||||
self.ole = olefile.OleFileIO(self.fp)
|
||||
except OSError as e:
|
||||
msg = "not an FPX file; invalid OLE file"
|
||||
raise SyntaxError(msg) from e
|
||||
|
||||
root = self.ole.root
|
||||
if not root or root.clsid != "56616700-C154-11CE-8553-00AA00A1F95B":
|
||||
msg = "not an FPX file; bad root CLSID"
|
||||
raise SyntaxError(msg)
|
||||
|
||||
self._open_index(1)
|
||||
|
||||
def _open_index(self, index: int = 1) -> None:
|
||||
#
|
||||
# get the Image Contents Property Set
|
||||
|
||||
prop = self.ole.getproperties(
|
||||
[f"Data Object Store {index:06d}", "\005Image Contents"]
|
||||
)
|
||||
|
||||
# size (highest resolution)
|
||||
|
||||
assert isinstance(prop[0x1000002], int)
|
||||
assert isinstance(prop[0x1000003], int)
|
||||
self._size = prop[0x1000002], prop[0x1000003]
|
||||
|
||||
size = max(self.size)
|
||||
i = 1
|
||||
while size > 64:
|
||||
size = size // 2
|
||||
i += 1
|
||||
self.maxid = i - 1
|
||||
|
||||
# mode. instead of using a single field for this, flashpix
|
||||
# requires you to specify the mode for each channel in each
|
||||
# resolution subimage, and leaves it to the decoder to make
|
||||
# sure that they all match. for now, we'll cheat and assume
|
||||
# that this is always the case.
|
||||
|
||||
id = self.maxid << 16
|
||||
|
||||
s = prop[0x2000002 | id]
|
||||
|
||||
if not isinstance(s, bytes) or (bands := i32(s, 4)) > 4:
|
||||
msg = "Invalid number of bands"
|
||||
raise OSError(msg)
|
||||
|
||||
# note: for now, we ignore the "uncalibrated" flag
|
||||
colors = tuple(i32(s, 8 + i * 4) & 0x7FFFFFFF for i in range(bands))
|
||||
|
||||
self._mode, self.rawmode = MODES[colors]
|
||||
|
||||
# load JPEG tables, if any
|
||||
self.jpeg = {}
|
||||
for i in range(256):
|
||||
id = 0x3000001 | (i << 16)
|
||||
if id in prop:
|
||||
self.jpeg[i] = prop[id]
|
||||
|
||||
self._open_subimage(1, self.maxid)
|
||||
|
||||
def _open_subimage(self, index: int = 1, subimage: int = 0) -> None:
|
||||
#
|
||||
# setup tile descriptors for a given subimage
|
||||
|
||||
stream = [
|
||||
f"Data Object Store {index:06d}",
|
||||
f"Resolution {subimage:04d}",
|
||||
"Subimage 0000 Header",
|
||||
]
|
||||
|
||||
fp = self.ole.openstream(stream)
|
||||
|
||||
# skip prefix
|
||||
fp.read(28)
|
||||
|
||||
# header stream
|
||||
s = fp.read(36)
|
||||
|
||||
size = i32(s, 4), i32(s, 8)
|
||||
# tilecount = i32(s, 12)
|
||||
tilesize = i32(s, 16), i32(s, 20)
|
||||
# channels = i32(s, 24)
|
||||
offset = i32(s, 28)
|
||||
length = i32(s, 32)
|
||||
|
||||
if size != self.size:
|
||||
msg = "subimage mismatch"
|
||||
raise OSError(msg)
|
||||
|
||||
# get tile descriptors
|
||||
fp.seek(28 + offset)
|
||||
s = fp.read(i32(s, 12) * length)
|
||||
|
||||
x = y = 0
|
||||
xsize, ysize = size
|
||||
xtile, ytile = tilesize
|
||||
self.tile = []
|
||||
|
||||
for i in range(0, len(s), length):
|
||||
x1 = min(xsize, x + xtile)
|
||||
y1 = min(ysize, y + ytile)
|
||||
|
||||
compression = i32(s, i + 8)
|
||||
|
||||
if compression == 0:
|
||||
self.tile.append(
|
||||
ImageFile._Tile(
|
||||
"raw",
|
||||
(x, y, x1, y1),
|
||||
i32(s, i) + 28,
|
||||
self.rawmode,
|
||||
)
|
||||
)
|
||||
|
||||
elif compression == 1:
|
||||
# FIXME: the fill decoder is not implemented
|
||||
self.tile.append(
|
||||
ImageFile._Tile(
|
||||
"fill",
|
||||
(x, y, x1, y1),
|
||||
i32(s, i) + 28,
|
||||
(self.rawmode, s[12:16]),
|
||||
)
|
||||
)
|
||||
|
||||
elif compression == 2:
|
||||
internal_color_conversion = s[14]
|
||||
jpeg_tables = s[15]
|
||||
rawmode = self.rawmode
|
||||
|
||||
if internal_color_conversion:
|
||||
# The image is stored as usual (usually YCbCr).
|
||||
if rawmode == "RGBA":
|
||||
# For "RGBA", data is stored as YCbCrA based on
|
||||
# negative RGB. The following trick works around
|
||||
# this problem :
|
||||
jpegmode, rawmode = "YCbCrK", "CMYK"
|
||||
else:
|
||||
jpegmode = None # let the decoder decide
|
||||
|
||||
else:
|
||||
# The image is stored as defined by rawmode
|
||||
jpegmode = rawmode
|
||||
|
||||
self.tile.append(
|
||||
ImageFile._Tile(
|
||||
"jpeg",
|
||||
(x, y, x1, y1),
|
||||
i32(s, i) + 28,
|
||||
(rawmode, jpegmode),
|
||||
)
|
||||
)
|
||||
|
||||
# FIXME: jpeg tables are tile dependent; the prefix
|
||||
# data must be placed in the tile descriptor itself!
|
||||
|
||||
if jpeg_tables:
|
||||
self.tile_prefix = self.jpeg[jpeg_tables]
|
||||
|
||||
else:
|
||||
msg = "unknown/invalid compression"
|
||||
raise OSError(msg)
|
||||
|
||||
x = x + xtile
|
||||
if x >= xsize:
|
||||
x, y = 0, y + ytile
|
||||
if y >= ysize:
|
||||
break # isn't really required
|
||||
|
||||
assert self.fp is not None
|
||||
self.stream = stream
|
||||
self._fp = self.fp
|
||||
self.fp = None
|
||||
|
||||
def load(self) -> Image.core.PixelAccess | None:
|
||||
if not self.fp:
|
||||
self.fp = self.ole.openstream(self.stream[:2] + ["Subimage 0000 Data"])
|
||||
|
||||
return ImageFile.ImageFile.load(self)
|
||||
|
||||
def close(self) -> None:
|
||||
self.ole.close()
|
||||
super().close()
|
||||
|
||||
def __exit__(self, *args: object) -> None:
|
||||
self.ole.close()
|
||||
super().__exit__()
|
||||
|
||||
|
||||
#
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
|
||||
Image.register_open(FpxImageFile.format, FpxImageFile, _accept)
|
||||
|
||||
Image.register_extension(FpxImageFile.format, ".fpx")
|
||||
115
venv/lib/python3.11/site-packages/PIL/FtexImagePlugin.py
Normal file
115
venv/lib/python3.11/site-packages/PIL/FtexImagePlugin.py
Normal file
@ -0,0 +1,115 @@
|
||||
"""
|
||||
A Pillow loader for .ftc and .ftu files (FTEX)
|
||||
Jerome Leclanche <jerome@leclan.ch>
|
||||
|
||||
The contents of this file are hereby released in the public domain (CC0)
|
||||
Full text of the CC0 license:
|
||||
https://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
Independence War 2: Edge Of Chaos - Texture File Format - 16 October 2001
|
||||
|
||||
The textures used for 3D objects in Independence War 2: Edge Of Chaos are in a
|
||||
packed custom format called FTEX. This file format uses file extensions FTC
|
||||
and FTU.
|
||||
* FTC files are compressed textures (using standard texture compression).
|
||||
* FTU files are not compressed.
|
||||
Texture File Format
|
||||
The FTC and FTU texture files both use the same format. This
|
||||
has the following structure:
|
||||
{header}
|
||||
{format_directory}
|
||||
{data}
|
||||
Where:
|
||||
{header} = {
|
||||
u32:magic,
|
||||
u32:version,
|
||||
u32:width,
|
||||
u32:height,
|
||||
u32:mipmap_count,
|
||||
u32:format_count
|
||||
}
|
||||
|
||||
* The "magic" number is "FTEX".
|
||||
* "width" and "height" are the dimensions of the texture.
|
||||
* "mipmap_count" is the number of mipmaps in the texture.
|
||||
* "format_count" is the number of texture formats (different versions of the
|
||||
same texture) in this file.
|
||||
|
||||
{format_directory} = format_count * { u32:format, u32:where }
|
||||
|
||||
The format value is 0 for DXT1 compressed textures and 1 for 24-bit RGB
|
||||
uncompressed textures.
|
||||
The texture data for a format starts at the position "where" in the file.
|
||||
|
||||
Each set of texture data in the file has the following structure:
|
||||
{data} = format_count * { u32:mipmap_size, mipmap_size * { u8 } }
|
||||
* "mipmap_size" is the number of bytes in that mip level. For compressed
|
||||
textures this is the size of the texture data compressed with DXT1. For 24 bit
|
||||
uncompressed textures, this is 3 * width * height. Following this are the image
|
||||
bytes for that mipmap level.
|
||||
|
||||
Note: All data is stored in little-Endian (Intel) byte order.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import struct
|
||||
from enum import IntEnum
|
||||
from io import BytesIO
|
||||
|
||||
from . import Image, ImageFile
|
||||
|
||||
MAGIC = b"FTEX"
|
||||
|
||||
|
||||
class Format(IntEnum):
|
||||
DXT1 = 0
|
||||
UNCOMPRESSED = 1
|
||||
|
||||
|
||||
class FtexImageFile(ImageFile.ImageFile):
|
||||
format = "FTEX"
|
||||
format_description = "Texture File Format (IW2:EOC)"
|
||||
|
||||
def _open(self) -> None:
|
||||
assert self.fp is not None
|
||||
if not _accept(self.fp.read(4)):
|
||||
msg = "not an FTEX file"
|
||||
raise SyntaxError(msg)
|
||||
struct.unpack("<i", self.fp.read(4)) # version
|
||||
self._size = struct.unpack("<2i", self.fp.read(8))
|
||||
mipmap_count, format_count = struct.unpack("<2i", self.fp.read(8))
|
||||
|
||||
# Only support single-format files.
|
||||
# I don't know of any multi-format file.
|
||||
assert format_count == 1
|
||||
|
||||
format, where = struct.unpack("<2i", self.fp.read(8))
|
||||
self.fp.seek(where)
|
||||
(mipmap_size,) = struct.unpack("<i", self.fp.read(4))
|
||||
|
||||
data = self.fp.read(mipmap_size)
|
||||
|
||||
if format == Format.DXT1:
|
||||
self._mode = "RGBA"
|
||||
self.tile = [ImageFile._Tile("bcn", (0, 0) + self.size, 0, (1,))]
|
||||
elif format == Format.UNCOMPRESSED:
|
||||
self._mode = "RGB"
|
||||
self.tile = [ImageFile._Tile("raw", (0, 0) + self.size, 0, "RGB")]
|
||||
else:
|
||||
msg = f"Invalid texture compression format: {repr(format)}"
|
||||
raise ValueError(msg)
|
||||
|
||||
self.fp.close()
|
||||
self.fp = BytesIO(data)
|
||||
|
||||
def load_seek(self, pos: int) -> None:
|
||||
pass
|
||||
|
||||
|
||||
def _accept(prefix: bytes) -> bool:
|
||||
return prefix.startswith(MAGIC)
|
||||
|
||||
|
||||
Image.register_open(FtexImageFile.format, FtexImageFile, _accept)
|
||||
Image.register_extensions(FtexImageFile.format, [".ftc", ".ftu"])
|
||||
103
venv/lib/python3.11/site-packages/PIL/GbrImagePlugin.py
Normal file
103
venv/lib/python3.11/site-packages/PIL/GbrImagePlugin.py
Normal file
@ -0,0 +1,103 @@
|
||||
#
|
||||
# The Python Imaging Library
|
||||
#
|
||||
# load a GIMP brush file
|
||||
#
|
||||
# History:
|
||||
# 96-03-14 fl Created
|
||||
# 16-01-08 es Version 2
|
||||
#
|
||||
# Copyright (c) Secret Labs AB 1997.
|
||||
# Copyright (c) Fredrik Lundh 1996.
|
||||
# Copyright (c) Eric Soroos 2016.
|
||||
#
|
||||
# See the README file for information on usage and redistribution.
|
||||
#
|
||||
#
|
||||
# See https://github.com/GNOME/gimp/blob/mainline/devel-docs/gbr.txt for
|
||||
# format documentation.
|
||||
#
|
||||
# This code Interprets version 1 and 2 .gbr files.
|
||||
# Version 1 files are obsolete, and should not be used for new
|
||||
# brushes.
|
||||
# Version 2 files are saved by GIMP v2.8 (at least)
|
||||
# Version 3 files have a format specifier of 18 for 16bit floats in
|
||||
# the color depth field. This is currently unsupported by Pillow.
|
||||
from __future__ import annotations
|
||||
|
||||
from . import Image, ImageFile
|
||||
from ._binary import i32be as i32
|
||||
|
||||
|
||||
def _accept(prefix: bytes) -> bool:
|
||||
return len(prefix) >= 8 and i32(prefix, 0) >= 20 and i32(prefix, 4) in (1, 2)
|
||||
|
||||
|
||||
##
|
||||
# Image plugin for the GIMP brush format.
|
||||
|
||||
|
||||
class GbrImageFile(ImageFile.ImageFile):
|
||||
format = "GBR"
|
||||
format_description = "GIMP brush file"
|
||||
|
||||
def _open(self) -> None:
|
||||
assert self.fp is not None
|
||||
header_size = i32(self.fp.read(4))
|
||||
if header_size < 20:
|
||||
msg = "not a GIMP brush"
|
||||
raise SyntaxError(msg)
|
||||
version = i32(self.fp.read(4))
|
||||
if version not in (1, 2):
|
||||
msg = f"Unsupported GIMP brush version: {version}"
|
||||
raise SyntaxError(msg)
|
||||
|
||||
width = i32(self.fp.read(4))
|
||||
height = i32(self.fp.read(4))
|
||||
color_depth = i32(self.fp.read(4))
|
||||
if width == 0 or height == 0:
|
||||
msg = "not a GIMP brush"
|
||||
raise SyntaxError(msg)
|
||||
if color_depth not in (1, 4):
|
||||
msg = f"Unsupported GIMP brush color depth: {color_depth}"
|
||||
raise SyntaxError(msg)
|
||||
|
||||
if version == 1:
|
||||
comment_length = header_size - 20
|
||||
else:
|
||||
comment_length = header_size - 28
|
||||
magic_number = self.fp.read(4)
|
||||
if magic_number != b"GIMP":
|
||||
msg = "not a GIMP brush, bad magic number"
|
||||
raise SyntaxError(msg)
|
||||
self.info["spacing"] = i32(self.fp.read(4))
|
||||
|
||||
self.info["comment"] = self.fp.read(comment_length)[:-1]
|
||||
|
||||
if color_depth == 1:
|
||||
self._mode = "L"
|
||||
else:
|
||||
self._mode = "RGBA"
|
||||
|
||||
self._size = width, height
|
||||
|
||||
# Image might not be small
|
||||
Image._decompression_bomb_check(self.size)
|
||||
|
||||
# Data is an uncompressed block of w * h * bytes/pixel
|
||||
self._data_size = width * height * color_depth
|
||||
|
||||
def load(self) -> Image.core.PixelAccess | None:
|
||||
if self._im is None:
|
||||
assert self.fp is not None
|
||||
self.im = Image.core.new(self.mode, self.size)
|
||||
self.frombytes(self.fp.read(self._data_size))
|
||||
return Image.Image.load(self)
|
||||
|
||||
|
||||
#
|
||||
# registry
|
||||
|
||||
|
||||
Image.register_open(GbrImageFile.format, GbrImageFile, _accept)
|
||||
Image.register_extension(GbrImageFile.format, ".gbr")
|
||||
102
venv/lib/python3.11/site-packages/PIL/GdImageFile.py
Normal file
102
venv/lib/python3.11/site-packages/PIL/GdImageFile.py
Normal file
@ -0,0 +1,102 @@
|
||||
#
|
||||
# The Python Imaging Library.
|
||||
# $Id$
|
||||
#
|
||||
# GD file handling
|
||||
#
|
||||
# History:
|
||||
# 1996-04-12 fl Created
|
||||
#
|
||||
# Copyright (c) 1997 by Secret Labs AB.
|
||||
# Copyright (c) 1996 by Fredrik Lundh.
|
||||
#
|
||||
# See the README file for information on usage and redistribution.
|
||||
#
|
||||
|
||||
|
||||
"""
|
||||
.. note::
|
||||
This format cannot be automatically recognized, so the
|
||||
class is not registered for use with :py:func:`PIL.Image.open()`. To open a
|
||||
gd file, use the :py:func:`PIL.GdImageFile.open()` function instead.
|
||||
|
||||
.. warning::
|
||||
THE GD FORMAT IS NOT DESIGNED FOR DATA INTERCHANGE. This
|
||||
implementation is provided for convenience and demonstrational
|
||||
purposes only.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import IO
|
||||
|
||||
from . import ImageFile, ImagePalette, UnidentifiedImageError
|
||||
from ._binary import i16be as i16
|
||||
from ._binary import i32be as i32
|
||||
from ._typing import StrOrBytesPath
|
||||
|
||||
|
||||
class GdImageFile(ImageFile.ImageFile):
|
||||
"""
|
||||
Image plugin for the GD uncompressed format. Note that this format
|
||||
is not supported by the standard :py:func:`PIL.Image.open()` function. To use
|
||||
this plugin, you have to import the :py:mod:`PIL.GdImageFile` module and
|
||||
use the :py:func:`PIL.GdImageFile.open()` function.
|
||||
"""
|
||||
|
||||
format = "GD"
|
||||
format_description = "GD uncompressed images"
|
||||
|
||||
def _open(self) -> None:
|
||||
# Header
|
||||
assert self.fp is not None
|
||||
|
||||
s = self.fp.read(1037)
|
||||
|
||||
if i16(s) not in [65534, 65535]:
|
||||
msg = "Not a valid GD 2.x .gd file"
|
||||
raise SyntaxError(msg)
|
||||
|
||||
self._mode = "P"
|
||||
self._size = i16(s, 2), i16(s, 4)
|
||||
|
||||
true_color = s[6]
|
||||
true_color_offset = 2 if true_color else 0
|
||||
|
||||
# transparency index
|
||||
tindex = i32(s, 7 + true_color_offset)
|
||||
if tindex < 256:
|
||||
self.info["transparency"] = tindex
|
||||
|
||||
self.palette = ImagePalette.raw(
|
||||
"RGBX", s[7 + true_color_offset + 6 : 7 + true_color_offset + 6 + 256 * 4]
|
||||
)
|
||||
|
||||
self.tile = [
|
||||
ImageFile._Tile(
|
||||
"raw",
|
||||
(0, 0) + self.size,
|
||||
7 + true_color_offset + 6 + 256 * 4,
|
||||
"L",
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
def open(fp: StrOrBytesPath | IO[bytes], mode: str = "r") -> GdImageFile:
|
||||
"""
|
||||
Load texture from a GD image file.
|
||||
|
||||
:param fp: GD file name, or an opened file handle.
|
||||
:param mode: Optional mode. In this version, if the mode argument
|
||||
is given, it must be "r".
|
||||
:returns: An image instance.
|
||||
:raises OSError: If the image could not be read.
|
||||
"""
|
||||
if mode != "r":
|
||||
msg = "bad mode"
|
||||
raise ValueError(msg)
|
||||
|
||||
try:
|
||||
return GdImageFile(fp)
|
||||
except SyntaxError as e:
|
||||
msg = "cannot identify this image file"
|
||||
raise UnidentifiedImageError(msg) from e
|
||||
1217
venv/lib/python3.11/site-packages/PIL/GifImagePlugin.py
Normal file
1217
venv/lib/python3.11/site-packages/PIL/GifImagePlugin.py
Normal file
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user