Initial commit
commit d8b6ed0443
0  .vscode/config.json  (vendored, new file, empty)

3  .vscode/deploy.yml  (vendored, new file)
@@ -0,0 +1,3 @@
- hosts: localhost
  roles:
    - docker-compose

14  .vscode/docker/backup.dockerfile  (vendored, new file)
@@ -0,0 +1,14 @@
# Use the Alpine image as the base
FROM alpine:latest as backup_custom

# Copy the entrypoint script into the container
COPY entrypoint_backup.sh /usr/local/bin/entrypoint_backup.sh

# Switch to root user for setup
USER root

# Make the entrypoint script executable
RUN chmod +x /usr/local/bin/entrypoint_backup.sh

# Set the new entrypoint
ENTRYPOINT ["/usr/local/bin/entrypoint_backup.sh"]

27  .vscode/docker/exporter.dockerfile  (vendored, new file)
@@ -0,0 +1,27 @@
# Use a Python slim image as base
FROM python:3.9-slim as exporter_custom

# Set environment variables to avoid buffering
ENV PYTHONUNBUFFERED=1

# Create and set working directory
WORKDIR /app

# Install required dependencies
RUN apt-get update && \
    apt-get install -y \
    libpq-dev && \
    pip install psycopg2-binary prometheus-client && \
    apt-get clean

# Create a directory for logs
RUN mkdir /app/logs

# Copy the Python script into the container
COPY pg_metrics_exporter.py /app/

# Set permissions for log directory (if required)
RUN chmod 755 /app/logs

# Run the script and redirect logs to a file (shell form, so the shell interprets the redirection)
CMD python /app/pg_metrics_exporter.py > /app/logs/exporter.log 2>&1

13  .vscode/docker/odoo.dockerfile  (vendored, new file)
@@ -0,0 +1,13 @@
# Use the existing Odoo image as the base
FROM odoo:latest as odoo-custom

# Copy the entrypoint script into the container
COPY entrypoint_odoo.sh /usr/local/bin/entrypoint_odoo.sh

USER root

# Make the entrypoint script executable
RUN chmod +x /usr/local/bin/entrypoint_odoo.sh

# Set the new entrypoint
ENTRYPOINT ["/usr/local/bin/entrypoint_odoo.sh"]

11  .vscode/entrypoint_backup.sh  (vendored, new file)
@@ -0,0 +1,11 @@
#!/bin/sh

# Install PostgreSQL client
apk add --no-cache postgresql-client

# Wait until the PostgreSQL server is ready
until pg_isready -h admin -U admin; do
  echo "Waiting for PostgreSQL..."
  sleep 2
done

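As committed, this entrypoint only installs the client and waits for the database; no dump is ever taken. A minimal sketch of the step that would typically follow, assuming the POSTGRES_* variables and the /backup volume defined for the backup service in docker-compose.yml (hypothetical, not part of this commit):

# Hypothetical follow-up: dump the database into the mounted /backup volume once a day
while true; do
  PGPASSWORD="$POSTGRES_PASSWORD" pg_dump -h "$POSTGRES_HOST" -U "$POSTGRES_USER" "$POSTGRES_DB" \
    > "/backup/${POSTGRES_DB}_$(date +%Y%m%d_%H%M%S).sql"
  sleep 86400
done
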
4  .vscode/entrypoint_odoo.sh  (vendored, new file)
@@ -0,0 +1,4 @@
#!/bin/sh

sleep 20
odoo -i base

13  .vscode/fluent.conf  (vendored, new file)
@@ -0,0 +1,13 @@
<source>
  @type tail
  path "/var/log/odoo/odoo.log"
  pos_file "/fluentd/logs/odoo.pos"
  format none
  tag "odoo.log"
</source>

<match odoo.log>
  @type file
  path "/fluentd/logs/collected-logs"
</match>

2  .vscode/inventory/hosts  (vendored, new file)
@@ -0,0 +1,2 @@
[localhost]
localhost ansible_connection=local

3  .vscode/notes.txt  (vendored, new file)
@@ -0,0 +1,3 @@
docker-compose -f roles/docker-compose/files/docker-compose.yml up -d

ansible-playbook -i hosts deploy.yml

8  .vscode/odoo.conf  (vendored, new file)
@@ -0,0 +1,8 @@
[options]
db_host = admin
db_port = 5432
db_user = admin
db_password = admin
default_productivity_apps = True
db_name = admin
logfile = /var/log/odoo/odoo.log

59  .vscode/pg_metrics_exporter.py  (vendored, new file)
@@ -0,0 +1,59 @@
import psycopg2
from prometheus_client import start_http_server, Gauge
import time

# Configuration for database connection
DB_PARAMS = {
    "host": "admin",
    "database": "admin",
    "user": "admin",
    "password": "admin"
}

# Prometheus metrics
QUERY_CALLS = Gauge('postgresql_query_calls', 'Number of calls for each query', ['query'])
QUERY_TOTAL_TIME = Gauge('postgresql_query_total_time_ms', 'Total execution time for each query in ms', ['query'])

def fetch_metrics():
    try:
        # Log connection attempt
        print("Connecting to PostgreSQL database...")

        conn = psycopg2.connect(**DB_PARAMS)
        cur = conn.cursor()

        # Execute query to get data
        cur.execute("""
            SELECT query, calls, total_exec_time
            FROM pg_stat_statements
            ORDER BY total_exec_time DESC;
        """)

        # Iterate through results and set Prometheus metrics
        for row in cur:
            query = row[0].replace("\\", "\\\\").replace('"', '\\"')  # Escape special characters
            calls = row[1]
            total_time = row[2]  # total_exec_time is already reported in milliseconds

            QUERY_CALLS.labels(query=query).set(calls)
            QUERY_TOTAL_TIME.labels(query=query).set(total_time)

            # Log the metrics being set
            print(f"Metrics set for query: {query} | Calls: {calls} | Total execution time: {total_time} ms")

        cur.close()
        conn.close()
    except psycopg2.Error as e:
        print(f"Error fetching data: {e}")
    except Exception as e:
        print(f"Unexpected error: {e}")

if __name__ == '__main__':
    # Start Prometheus HTTP server on port 8000
    start_http_server(8000)
    print("Exporter running on http://localhost:8000/metrics")

    # Main loop to fetch metrics at regular intervals
    while True:
        fetch_metrics()
        time.sleep(60)  # Scrape every 60 seconds

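Note: the query above reads pg_stat_statements, which the stock postgres image neither preloads nor creates by default, so the exporter will log an error until the extension is enabled. A sketch of the setup this appears to assume (hypothetical, not part of this commit):

# Hypothetical: preload the library on the admin service (e.g. in docker-compose.yml:
#   command: ["postgres", "-c", "shared_preload_libraries=pg_stat_statements"])
# then create the extension once in the target database:
docker-compose exec admin psql -U admin -d admin -c "CREATE EXTENSION IF NOT EXISTS pg_stat_statements;"
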
0  .vscode/prometheus.yml  (vendored, new file, empty)

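The committed prometheus.yml is empty, although the prometheus service mounts it as its configuration and Grafana points at prometheus:9090. A minimal scrape configuration that would expose the exporter's metrics could look like this (an illustrative sketch, not part of the commit):

global:
  scrape_interval: 60s

scrape_configs:
  - job_name: postgres_queries
    static_configs:
      - targets: ["prometheus_exporter:8000"]
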
109  .vscode/roles/docker-compose/files/docker-compose.yml  (vendored, new file)
@@ -0,0 +1,109 @@
version: "3.9"
services:
  admin:
    image: postgres:latest
    environment:
      POSTGRES_DB: admin
      POSTGRES_USER: admin
      POSTGRES_PASSWORD: admin
    ports:
      - "5432:5432"
    volumes:
      - postgres-data:/var/lib/postgresql/data
    networks:
      - testNetwork

  pgadmin:
    image: dpage/pgadmin4:latest
    environment:
      PGADMIN_DEFAULT_EMAIL: admin@admin.com
      PGADMIN_DEFAULT_PASSWORD: admin
    ports:
      - "5051:80"
    networks:
      - testNetwork

  prometheus_exporter:
    image: exporter_custom
    ports:
      - "8000:8000"
    networks:
      - testNetwork
    depends_on:
      - admin

  odoo:
    image: odoo_custom
    environment:
      HOST: admin
      USER: admin
      PASSWORD: admin
      DATABASE: admin
      ODOO_PASSWORD: admin
    ports:
      - "8069:8069"
    volumes:
      - ./odoo.conf:/etc/odoo/odoo.conf
      - ./logs/odoo:/var/log/odoo
    networks:
      - testNetwork
    depends_on:
      - admin

  grafana:
    image: grafana/grafana:latest
    environment:
      GF_SECURITY_ADMIN_PASSWORD: grafana_pwd
      GF_DATASOURCES_PROMETHEUS_URL: http://prometheus:9090
    ports:
      - "3000:3000"
    networks:
      - testNetwork
    depends_on:
      - prometheus

  prometheus:
    image: prom/prometheus:latest
    ports:
      - "9090:9090"
    volumes:
      - prometheus-data:/prometheus
      - ./prometheus.yml:/etc/prometheus/prometheus.yml
    networks:
      - testNetwork

  fluentd:
    image: fluent/fluentd:v1.13-1
    ports:
      - "24224:24224"
    volumes:
      - ./logs/odoo:/var/log/odoo
      - ./fluent.conf:/fluentd/etc/fluent.conf
    networks:
      - testNetwork
    depends_on:
      - odoo


  backup:
    image: backup_custom
    environment:
      POSTGRES_HOST: admin
      POSTGRES_DB: admin
      POSTGRES_USER: admin
      POSTGRES_PASSWORD: admin
    volumes:
      - backup-data:/backup
    networks:
      - testNetwork
    depends_on:
      - admin

networks:
  testNetwork:
    driver: bridge

volumes:
  postgres-data:
  prometheus-data:
  backup-data:

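The odoo, prometheus_exporter and backup services reference the images odoo_custom, exporter_custom and backup_custom without a build: section, so those images have to exist locally before the stack is started. One way to build them from the Dockerfiles in .vscode/docker, run from the .vscode directory (a sketch of the assumed workflow, not spelled out in the commit):

docker build -f docker/odoo.dockerfile -t odoo_custom .
docker build -f docker/exporter.dockerfile -t exporter_custom .
docker build -f docker/backup.dockerfile -t backup_custom .
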
21  .vscode/roles/docker-compose/tasks/main.yml  (vendored, new file)
@@ -0,0 +1,21 @@
---
- name: Ensure Docker Compose is installed
  ansible.builtin.shell: |
    curl -L "https://github.com/docker/compose/releases/latest/download/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
    chmod +x /usr/local/bin/docker-compose
  args:
    executable: /bin/bash
  become: yes


- name: Copy Docker Compose file
  ansible.builtin.copy:
    src: roles/docker-compose/files/docker-compose.yml
    dest: /opt/docker-compose.yml
  become: yes


- name: Run Docker Compose
  ansible.builtin.command:
    cmd: docker-compose up -d
    chdir: /opt

4  .vscode/roles/docker-compose/templates/environment.j2  (vendored, new file)
@@ -0,0 +1,4 @@
POSTGRES_PASSWORD={{ POSTGRES_PASSWORD }}
POSTGRES_USER={{ POSTGRES_USER }}
POSTGRES_DB={{ POSTGRES_DB }}
ODOO_PASSWORD={{ ODOO_PASSWORD }}

3  .vscode/settings.json  (vendored, new file)
@@ -0,0 +1,3 @@
{
    "ansible.python.interpreterPath": "/bin/python3"
}

247  myenv/bin/Activate.ps1  (new file)
@@ -0,0 +1,247 @@
|
||||
<#
|
||||
.Synopsis
|
||||
Activate a Python virtual environment for the current PowerShell session.
|
||||
|
||||
.Description
|
||||
Pushes the python executable for a virtual environment to the front of the
|
||||
$Env:PATH environment variable and sets the prompt to signify that you are
|
||||
in a Python virtual environment. Makes use of the command line switches as
|
||||
well as the `pyvenv.cfg` file values present in the virtual environment.
|
||||
|
||||
.Parameter VenvDir
|
||||
Path to the directory that contains the virtual environment to activate. The
|
||||
default value for this is the parent of the directory that the Activate.ps1
|
||||
script is located within.
|
||||
|
||||
.Parameter Prompt
|
||||
The prompt prefix to display when this virtual environment is activated. By
|
||||
default, this prompt is the name of the virtual environment folder (VenvDir)
|
||||
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
|
||||
|
||||
.Example
|
||||
Activate.ps1
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Verbose
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and shows extra information about the activation as it executes.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
|
||||
Activates the Python virtual environment located in the specified location.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Prompt "MyPython"
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and prefixes the current prompt with the specified string (surrounded in
|
||||
parentheses) while the virtual environment is active.
|
||||
|
||||
.Notes
|
||||
On Windows, it may be required to enable this Activate.ps1 script by setting the
|
||||
execution policy for the user. You can do this by issuing the following PowerShell
|
||||
command:
|
||||
|
||||
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
|
||||
|
||||
For more information on Execution Policies:
|
||||
https://go.microsoft.com/fwlink/?LinkID=135170
|
||||
|
||||
#>
|
||||
Param(
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$VenvDir,
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$Prompt
|
||||
)
|
||||
|
||||
<# Function declarations --------------------------------------------------- #>
|
||||
|
||||
<#
|
||||
.Synopsis
|
||||
Remove all shell session elements added by the Activate script, including the
|
||||
addition of the virtual environment's Python executable from the beginning of
|
||||
the PATH variable.
|
||||
|
||||
.Parameter NonDestructive
|
||||
If present, do not remove this function from the global namespace for the
|
||||
session.
|
||||
|
||||
#>
|
||||
function global:deactivate ([switch]$NonDestructive) {
|
||||
# Revert to original values
|
||||
|
||||
# The prior prompt:
|
||||
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
|
||||
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
|
||||
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
|
||||
# The prior PYTHONHOME:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
}
|
||||
|
||||
# The prior PATH:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
|
||||
}
|
||||
|
||||
# Just remove the VIRTUAL_ENV altogether:
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV
|
||||
}
|
||||
|
||||
# Just remove VIRTUAL_ENV_PROMPT altogether.
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
|
||||
}
|
||||
|
||||
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
|
||||
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
|
||||
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
|
||||
}
|
||||
|
||||
# Leave deactivate function in the global namespace if requested:
|
||||
if (-not $NonDestructive) {
|
||||
Remove-Item -Path function:deactivate
|
||||
}
|
||||
}
|
||||
|
||||
<#
|
||||
.Description
|
||||
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
|
||||
given folder, and returns them in a map.
|
||||
|
||||
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
|
||||
two strings separated by `=` (with any amount of whitespace surrounding the =)
|
||||
then it is considered a `key = value` line. The left hand string is the key,
|
||||
the right hand is the value.
|
||||
|
||||
If the value starts with a `'` or a `"` then the first and last character is
|
||||
stripped from the value before being captured.
|
||||
|
||||
.Parameter ConfigDir
|
||||
Path to the directory that contains the `pyvenv.cfg` file.
|
||||
#>
|
||||
function Get-PyVenvConfig(
|
||||
[String]
|
||||
$ConfigDir
|
||||
) {
|
||||
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
|
||||
|
||||
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
|
||||
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
|
||||
|
||||
# An empty map will be returned if no config file is found.
|
||||
$pyvenvConfig = @{ }
|
||||
|
||||
if ($pyvenvConfigPath) {
|
||||
|
||||
Write-Verbose "File exists, parse `key = value` lines"
|
||||
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
|
||||
|
||||
$pyvenvConfigContent | ForEach-Object {
|
||||
$keyval = $PSItem -split "\s*=\s*", 2
|
||||
if ($keyval[0] -and $keyval[1]) {
|
||||
$val = $keyval[1]
|
||||
|
||||
# Remove extraneous quotations around a string value.
|
||||
if ("'""".Contains($val.Substring(0, 1))) {
|
||||
$val = $val.Substring(1, $val.Length - 2)
|
||||
}
|
||||
|
||||
$pyvenvConfig[$keyval[0]] = $val
|
||||
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
|
||||
}
|
||||
}
|
||||
}
|
||||
return $pyvenvConfig
|
||||
}
|
||||
|
||||
|
||||
<# Begin Activate script --------------------------------------------------- #>
|
||||
|
||||
# Determine the containing directory of this script
|
||||
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$VenvExecDir = Get-Item -Path $VenvExecPath
|
||||
|
||||
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
|
||||
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
|
||||
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
|
||||
|
||||
# Set values required in priority: CmdLine, ConfigFile, Default
|
||||
# First, get the location of the virtual environment, it might not be
|
||||
# VenvExecDir if specified on the command line.
|
||||
if ($VenvDir) {
|
||||
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
|
||||
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
|
||||
Write-Verbose "VenvDir=$VenvDir"
|
||||
}
|
||||
|
||||
# Next, read the `pyvenv.cfg` file to determine any required value such
|
||||
# as `prompt`.
|
||||
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
|
||||
|
||||
# Next, set the prompt from the command line, or the config file, or
|
||||
# just use the name of the virtual environment folder.
|
||||
if ($Prompt) {
|
||||
Write-Verbose "Prompt specified as argument, using '$Prompt'"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
|
||||
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
|
||||
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
|
||||
$Prompt = $pyvenvCfg['prompt'];
|
||||
}
|
||||
else {
|
||||
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
|
||||
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
|
||||
$Prompt = Split-Path -Path $venvDir -Leaf
|
||||
}
|
||||
}
|
||||
|
||||
Write-Verbose "Prompt = '$Prompt'"
|
||||
Write-Verbose "VenvDir='$VenvDir'"
|
||||
|
||||
# Deactivate any currently active virtual environment, but leave the
|
||||
# deactivate function in place.
|
||||
deactivate -nondestructive
|
||||
|
||||
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
|
||||
# that there is an activated venv.
|
||||
$env:VIRTUAL_ENV = $VenvDir
|
||||
|
||||
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
|
||||
|
||||
Write-Verbose "Setting prompt to '$Prompt'"
|
||||
|
||||
# Set the prompt to include the env name
|
||||
# Make sure _OLD_VIRTUAL_PROMPT is global
|
||||
function global:_OLD_VIRTUAL_PROMPT { "" }
|
||||
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
|
||||
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
|
||||
|
||||
function global:prompt {
|
||||
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
|
||||
_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
$env:VIRTUAL_ENV_PROMPT = $Prompt
|
||||
}
|
||||
|
||||
# Clear PYTHONHOME
|
||||
if (Test-Path -Path Env:PYTHONHOME) {
|
||||
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
Remove-Item -Path Env:PYTHONHOME
|
||||
}
|
||||
|
||||
# Add the venv to the PATH
|
||||
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
|
||||
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
|
70  myenv/bin/activate  (new file)
@@ -0,0 +1,70 @@
|
||||
# This file must be used with "source bin/activate" *from bash*
|
||||
# You cannot run it directly
|
||||
|
||||
deactivate () {
|
||||
# reset old environment variables
|
||||
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
|
||||
PATH="${_OLD_VIRTUAL_PATH:-}"
|
||||
export PATH
|
||||
unset _OLD_VIRTUAL_PATH
|
||||
fi
|
||||
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
|
||||
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
|
||||
export PYTHONHOME
|
||||
unset _OLD_VIRTUAL_PYTHONHOME
|
||||
fi
|
||||
|
||||
# Call hash to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
hash -r 2> /dev/null
|
||||
|
||||
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
|
||||
PS1="${_OLD_VIRTUAL_PS1:-}"
|
||||
export PS1
|
||||
unset _OLD_VIRTUAL_PS1
|
||||
fi
|
||||
|
||||
unset VIRTUAL_ENV
|
||||
unset VIRTUAL_ENV_PROMPT
|
||||
if [ ! "${1:-}" = "nondestructive" ] ; then
|
||||
# Self destruct!
|
||||
unset -f deactivate
|
||||
fi
|
||||
}
|
||||
|
||||
# unset irrelevant variables
|
||||
deactivate nondestructive
|
||||
|
||||
# on Windows, a path can contain colons and backslashes and has to be converted:
|
||||
if [ "${OSTYPE:-}" = "cygwin" ] || [ "${OSTYPE:-}" = "msys" ] ; then
|
||||
# transform D:\path\to\venv to /d/path/to/venv on MSYS
|
||||
# and to /cygdrive/d/path/to/venv on Cygwin
|
||||
export VIRTUAL_ENV=$(cygpath '/mnt/c/Users/Feten Dridi/Desktop/projetAnsible/myenv')
|
||||
else
|
||||
# use the path as-is
|
||||
export VIRTUAL_ENV='/mnt/c/Users/Feten Dridi/Desktop/projetAnsible/myenv'
|
||||
fi
|
||||
|
||||
_OLD_VIRTUAL_PATH="$PATH"
|
||||
PATH="$VIRTUAL_ENV/"bin":$PATH"
|
||||
export PATH
|
||||
|
||||
# unset PYTHONHOME if set
|
||||
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
|
||||
# could use `if (set -u; : $PYTHONHOME) ;` in bash
|
||||
if [ -n "${PYTHONHOME:-}" ] ; then
|
||||
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
|
||||
unset PYTHONHOME
|
||||
fi
|
||||
|
||||
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
|
||||
_OLD_VIRTUAL_PS1="${PS1:-}"
|
||||
PS1='(myenv) '"${PS1:-}"
|
||||
export PS1
|
||||
VIRTUAL_ENV_PROMPT='(myenv) '
|
||||
export VIRTUAL_ENV_PROMPT
|
||||
fi
|
||||
|
||||
# Call hash to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
hash -r 2> /dev/null
|
27  myenv/bin/activate.csh  (new file)
@@ -0,0 +1,27 @@
|
||||
# This file must be used with "source bin/activate.csh" *from csh*.
|
||||
# You cannot run it directly.
|
||||
|
||||
# Created by Davide Di Blasi <davidedb@gmail.com>.
|
||||
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
|
||||
|
||||
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
setenv VIRTUAL_ENV '/mnt/c/Users/Feten Dridi/Desktop/projetAnsible/myenv'
|
||||
|
||||
set _OLD_VIRTUAL_PATH="$PATH"
|
||||
setenv PATH "$VIRTUAL_ENV/"bin":$PATH"
|
||||
|
||||
|
||||
set _OLD_VIRTUAL_PROMPT="$prompt"
|
||||
|
||||
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
|
||||
set prompt = '(myenv) '"$prompt"
|
||||
setenv VIRTUAL_ENV_PROMPT '(myenv) '
|
||||
endif
|
||||
|
||||
alias pydoc python -m pydoc
|
||||
|
||||
rehash
|
69  myenv/bin/activate.fish  (new file)
@@ -0,0 +1,69 @@
|
||||
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
|
||||
# (https://fishshell.com/). You cannot run it directly.
|
||||
|
||||
function deactivate -d "Exit virtual environment and return to normal shell environment"
|
||||
# reset old environment variables
|
||||
if test -n "$_OLD_VIRTUAL_PATH"
|
||||
set -gx PATH $_OLD_VIRTUAL_PATH
|
||||
set -e _OLD_VIRTUAL_PATH
|
||||
end
|
||||
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
|
||||
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
|
||||
set -e _OLD_VIRTUAL_PYTHONHOME
|
||||
end
|
||||
|
||||
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
|
||||
set -e _OLD_FISH_PROMPT_OVERRIDE
|
||||
# prevents error when using nested fish instances (Issue #93858)
|
||||
if functions -q _old_fish_prompt
|
||||
functions -e fish_prompt
|
||||
functions -c _old_fish_prompt fish_prompt
|
||||
functions -e _old_fish_prompt
|
||||
end
|
||||
end
|
||||
|
||||
set -e VIRTUAL_ENV
|
||||
set -e VIRTUAL_ENV_PROMPT
|
||||
if test "$argv[1]" != "nondestructive"
|
||||
# Self-destruct!
|
||||
functions -e deactivate
|
||||
end
|
||||
end
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
set -gx VIRTUAL_ENV '/mnt/c/Users/Feten Dridi/Desktop/projetAnsible/myenv'
|
||||
|
||||
set -gx _OLD_VIRTUAL_PATH $PATH
|
||||
set -gx PATH "$VIRTUAL_ENV/"bin $PATH
|
||||
|
||||
# Unset PYTHONHOME if set.
|
||||
if set -q PYTHONHOME
|
||||
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
|
||||
set -e PYTHONHOME
|
||||
end
|
||||
|
||||
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
|
||||
# fish uses a function instead of an env var to generate the prompt.
|
||||
|
||||
# Save the current fish_prompt function as the function _old_fish_prompt.
|
||||
functions -c fish_prompt _old_fish_prompt
|
||||
|
||||
# With the original prompt function renamed, we can override with our own.
|
||||
function fish_prompt
|
||||
# Save the return status of the last command.
|
||||
set -l old_status $status
|
||||
|
||||
# Output the venv prompt; color taken from the blue of the Python logo.
|
||||
printf "%s%s%s" (set_color 4B8BBE) '(myenv) ' (set_color normal)
|
||||
|
||||
# Restore the return status of the previous command.
|
||||
echo "exit $old_status" | .
|
||||
# Output the original/"old" prompt.
|
||||
_old_fish_prompt
|
||||
end
|
||||
|
||||
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
|
||||
set -gx VIRTUAL_ENV_PROMPT '(myenv) '
|
||||
end
|
10  myenv/bin/debugpy  (new file)
@@ -0,0 +1,10 @@
|
||||
#!/bin/sh
|
||||
'''exec' "/mnt/c/Users/Feten Dridi/Desktop/projetAnsible/myenv/bin/python3" "$0" "$@"
|
||||
' '''
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from debugpy.server.cli import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
75  myenv/bin/get_gprof  (new file)
@@ -0,0 +1,75 @@
|
||||
#!/mnt/c/Users/Feten Dridi/Desktop/projetAnsible/myenv/bin/python3
|
||||
#
|
||||
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
|
||||
# Copyright (c) 2008-2016 California Institute of Technology.
|
||||
# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
|
||||
# License: 3-clause BSD. The full license text is available at:
|
||||
# - https://github.com/uqfoundation/dill/blob/master/LICENSE
|
||||
'''
|
||||
build profile graph for the given instance
|
||||
|
||||
running:
|
||||
$ get_gprof <args> <instance>
|
||||
|
||||
executes:
|
||||
gprof2dot -f pstats <args> <type>.prof | dot -Tpng -o <type>.call.png
|
||||
|
||||
where:
|
||||
<args> are arguments for gprof2dot, such as "-n 5 -e 5"
|
||||
<instance> is code to create the instance to profile
|
||||
<type> is the class of the instance (i.e. type(instance))
|
||||
|
||||
For example:
|
||||
$ get_gprof -n 5 -e 1 "import numpy; numpy.array([1,2])"
|
||||
|
||||
will create 'ndarray.call.png' with the profile graph for numpy.array([1,2]),
|
||||
where '-n 5' eliminates nodes below 5% threshold, similarly '-e 1' eliminates
|
||||
edges below 1% threshold
|
||||
'''
|
||||
|
||||
if __name__ == "__main__":
|
||||
import sys
|
||||
if len(sys.argv) < 2:
|
||||
print ("Please provide an object instance (e.g. 'import math; math.pi')")
|
||||
sys.exit()
|
||||
# grab args for gprof2dot
|
||||
args = sys.argv[1:-1]
|
||||
args = ' '.join(args)
|
||||
# last arg builds the object
|
||||
obj = sys.argv[-1]
|
||||
obj = obj.split(';')
|
||||
# multi-line prep for generating an instance
|
||||
for line in obj[:-1]:
|
||||
exec(line)
|
||||
# one-line generation of an instance
|
||||
try:
|
||||
obj = eval(obj[-1])
|
||||
except Exception:
|
||||
print ("Error processing object instance")
|
||||
sys.exit()
|
||||
|
||||
# get object 'name'
|
||||
objtype = type(obj)
|
||||
name = getattr(objtype, '__name__', getattr(objtype, '__class__', objtype))
|
||||
|
||||
# profile dumping an object
|
||||
import dill
|
||||
import os
|
||||
import cProfile
|
||||
#name = os.path.splitext(os.path.basename(__file__))[0]
|
||||
cProfile.run("dill.dumps(obj)", filename="%s.prof" % name)
|
||||
msg = "gprof2dot -f pstats %s %s.prof | dot -Tpng -o %s.call.png" % (args, name, name)
|
||||
try:
|
||||
res = os.system(msg)
|
||||
except Exception:
|
||||
print ("Please verify install of 'gprof2dot' to view profile graphs")
|
||||
if res:
|
||||
print ("Please verify install of 'gprof2dot' to view profile graphs")
|
||||
|
||||
# get stats
|
||||
f_prof = "%s.prof" % name
|
||||
import pstats
|
||||
stats = pstats.Stats(f_prof, stream=sys.stdout)
|
||||
stats.strip_dirs().sort_stats('cumtime')
|
||||
stats.print_stats(20) #XXX: save to file instead of print top 20?
|
||||
os.remove(f_prof)
|
54  myenv/bin/get_objgraph  (new file)
@@ -0,0 +1,54 @@
|
||||
#!/mnt/c/Users/Feten Dridi/Desktop/projetAnsible/myenv/bin/python3
|
||||
#
|
||||
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
|
||||
# Copyright (c) 2008-2016 California Institute of Technology.
|
||||
# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
|
||||
# License: 3-clause BSD. The full license text is available at:
|
||||
# - https://github.com/uqfoundation/dill/blob/master/LICENSE
|
||||
"""
|
||||
display the reference paths for objects in ``dill.types`` or a .pkl file
|
||||
|
||||
Notes:
|
||||
the generated image is useful in showing the pointer references in
|
||||
objects that are or can be pickled. Any object in ``dill.objects``
|
||||
listed in ``dill.load_types(picklable=True, unpicklable=True)`` works.
|
||||
|
||||
Examples::
|
||||
|
||||
$ get_objgraph ArrayType
|
||||
Image generated as ArrayType.png
|
||||
"""
|
||||
|
||||
import dill as pickle
|
||||
#pickle.debug.trace(True)
|
||||
#import pickle
|
||||
|
||||
# get all objects for testing
|
||||
from dill import load_types
|
||||
load_types(pickleable=True,unpickleable=True)
|
||||
from dill import objects
|
||||
|
||||
if __name__ == "__main__":
|
||||
import sys
|
||||
if len(sys.argv) != 2:
|
||||
print ("Please provide exactly one file or type name (e.g. 'IntType')")
|
||||
msg = "\n"
|
||||
for objtype in list(objects.keys())[:40]:
|
||||
msg += objtype + ', '
|
||||
print (msg + "...")
|
||||
else:
|
||||
objtype = str(sys.argv[-1])
|
||||
try:
|
||||
obj = objects[objtype]
|
||||
except KeyError:
|
||||
obj = pickle.load(open(objtype,'rb'))
|
||||
import os
|
||||
objtype = os.path.splitext(objtype)[0]
|
||||
try:
|
||||
import objgraph
|
||||
objgraph.show_refs(obj, filename=objtype+'.png')
|
||||
except ImportError:
|
||||
print ("Please install 'objgraph' to view object graphs")
|
||||
|
||||
|
||||
# EOF
|
10  myenv/bin/pip  (new file)
@@ -0,0 +1,10 @@
|
||||
#!/bin/sh
|
||||
'''exec' "/mnt/c/Users/Feten Dridi/Desktop/projetAnsible/myenv/bin/python3" "$0" "$@"
|
||||
' '''
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
10  myenv/bin/pip3  (new file)
@@ -0,0 +1,10 @@
|
||||
#!/bin/sh
|
||||
'''exec' "/mnt/c/Users/Feten Dridi/Desktop/projetAnsible/myenv/bin/python3" "$0" "$@"
|
||||
' '''
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
10  myenv/bin/pip3.12  (new file)
@@ -0,0 +1,10 @@
|
||||
#!/bin/sh
|
||||
'''exec' "/mnt/c/Users/Feten Dridi/Desktop/projetAnsible/myenv/bin/python3" "$0" "$@"
|
||||
' '''
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
10  myenv/bin/pysemver  (new file)
@@ -0,0 +1,10 @@
|
||||
#!/bin/sh
|
||||
'''exec' "/mnt/c/Users/Feten Dridi/Desktop/projetAnsible/myenv/bin/python3" "$0" "$@"
|
||||
' '''
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from semver import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
1  myenv/bin/python  (symbolic link, new file)
@@ -0,0 +1 @@
python3

1  myenv/bin/python3  (symbolic link, new file)
@@ -0,0 +1 @@
/usr/bin/python3

1  myenv/bin/python3.12  (symbolic link, new file)
@@ -0,0 +1 @@
python3

22  myenv/bin/undill  (new file)
@@ -0,0 +1,22 @@
|
||||
#!/mnt/c/Users/Feten Dridi/Desktop/projetAnsible/myenv/bin/python3
|
||||
#
|
||||
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
|
||||
# Copyright (c) 2008-2016 California Institute of Technology.
|
||||
# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
|
||||
# License: 3-clause BSD. The full license text is available at:
|
||||
# - https://github.com/uqfoundation/dill/blob/master/LICENSE
|
||||
"""
|
||||
unpickle the contents of a pickled object file
|
||||
|
||||
Examples::
|
||||
|
||||
$ undill hello.pkl
|
||||
['hello', 'world']
|
||||
"""
|
||||
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
import dill
|
||||
for file in sys.argv[1:]:
|
||||
print (dill.load(open(file,'rb')))
|
||||
|
@@ -0,0 +1,11 @@
|
||||
Arpeggio - Parser interpreter based on PEG grammars
|
||||
|
||||
Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
|
||||
|
||||
|
||||
# Contributors
|
||||
|
||||
- Bug reports/ideas: https://github.com/textX/Arpeggio/issues?utf8=%E2%9C%93&q=is%3Aissue
|
||||
- Code/docs contributions:
|
||||
- https://github.com/textX/Arpeggio/graphs/contributors
|
||||
- https://github.com/textX/Arpeggio/pulls?utf8=%E2%9C%93&q=is%3Apr+
|
@@ -0,0 +1 @@
|
||||
pip
|
@@ -0,0 +1,22 @@
|
||||
Arpeggio is released under the terms of the MIT License
|
||||
-------------------------------------------------------
|
||||
|
||||
Copyright (c) 2009-2019 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
@@ -0,0 +1,54 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: Arpeggio
|
||||
Version: 2.0.2
|
||||
Summary: Packrat parser interpreter
|
||||
Home-page: https://github.com/textX/Arpeggio
|
||||
Download-URL:
|
||||
Author: Igor R. Dejanovic
|
||||
Author-email: igor.dejanovic@gmail.com
|
||||
License: MIT
|
||||
Keywords: parser,PEG,packrat,library,interpreter
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: Intended Audience :: Information Technology
|
||||
Classifier: Intended Audience :: Science/Research
|
||||
Classifier: Topic :: Software Development :: Interpreters
|
||||
Classifier: Topic :: Software Development :: Compilers
|
||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.6
|
||||
Classifier: Programming Language :: Python :: 3.7
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Description-Content-Type: text/markdown
|
||||
License-File: LICENSE
|
||||
License-File: AUTHORS.md
|
||||
Provides-Extra: dev
|
||||
Requires-Dist: wheel ; extra == 'dev'
|
||||
Requires-Dist: mkdocs ; extra == 'dev'
|
||||
Requires-Dist: mike ; extra == 'dev'
|
||||
Requires-Dist: twine ; extra == 'dev'
|
||||
Provides-Extra: test
|
||||
Requires-Dist: flake8 ; extra == 'test'
|
||||
Requires-Dist: coverage ; extra == 'test'
|
||||
Requires-Dist: coveralls ; extra == 'test'
|
||||
Requires-Dist: pytest ; extra == 'test'
|
||||
|
||||

|
||||
|
||||
[](https://pypi.python.org/pypi/Arpeggio)
|
||||

|
||||
[](https://github.com/textx/Arpeggio/actions)
|
||||
[](https://coveralls.io/github/textX/Arpeggio)
|
||||
[](http://textx.github.io/Arpeggio/latest/)
|
||||
|
||||
Arpeggio is a recursive descent parser with memoization based on PEG grammars
|
||||
(aka Packrat parser).
|
||||
|
||||
Documentation with tutorials is available [here](http://textx.github.io/Arpeggio/).
|
||||
|
||||
**Note:** for a higher level parsing/language tool (i.e., a nicer interface to
|
||||
Arpeggio) see [textX](https://github.com/textX/textX).
|
@@ -0,0 +1,103 @@
|
||||
Arpeggio-2.0.2.dist-info/AUTHORS.md,sha256=Qmm0ctdqOMSZLg-z_PnaBKAxGXuWqsD4l5q_WXJe7Ao,381
|
||||
Arpeggio-2.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
Arpeggio-2.0.2.dist-info/LICENSE,sha256=U-vbB6IMAKHb2WVqNp6Ldb-on9o0K1QTMY4LTgvpHys,1218
|
||||
Arpeggio-2.0.2.dist-info/METADATA,sha256=HqmT2lyKp5jZRa2zDre676_Ou1XtQLD3hdPnB7ghTAU,2445
|
||||
Arpeggio-2.0.2.dist-info/RECORD,,
|
||||
Arpeggio-2.0.2.dist-info/WHEEL,sha256=a-zpFRIJzOq5QfuhBzbhiA1eHTzNCJn8OdRvhdNX0Rk,110
|
||||
Arpeggio-2.0.2.dist-info/top_level.txt,sha256=kdvFB1a87VHff27Uaeq70GZjKVS6HBV_keZDL20k8jE,9
|
||||
arpeggio/__init__.py,sha256=2GRZeypUO1OGiUuuJmusj6EmVnagwxLwyUFQf84Vnm0,64627
|
||||
arpeggio/__pycache__/__init__.cpython-312.pyc,,
|
||||
arpeggio/__pycache__/cleanpeg.cpython-312.pyc,,
|
||||
arpeggio/__pycache__/export.cpython-312.pyc,,
|
||||
arpeggio/__pycache__/peg.cpython-312.pyc,,
|
||||
arpeggio/__pycache__/utils.cpython-312.pyc,,
|
||||
arpeggio/cleanpeg.py,sha256=Cg-aEXuIjKWhcbLLT__cpxCsjiPDChXOpeDIoT-KRGQ,2867
|
||||
arpeggio/export.py,sha256=WnznnPw7_CHpG4nWG7r19hcV-L_MQj35KV9XUQJxtq8,6689
|
||||
arpeggio/peg.py,sha256=VUVTwarDUxobeWQ7OOmox5WcBByhfm6qwHLPJCwXaBs,10659
|
||||
arpeggio/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
arpeggio/tests/__pycache__/__init__.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_decorator_combine.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_default_semantic_action.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_eolterm.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_error_reporting.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_examples.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_exporter.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_flags.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_parser_params.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_parser_resilience.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_parsing_expressions.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_pathologic_models.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_peg_parser.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_position.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_ptnode_navigation_expressions.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_python_parser.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_reduce_tree.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_semantic_action_results.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_separators.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_sequence_params.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_suppression.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_unicode.cpython-312.pyc,,
|
||||
arpeggio/tests/__pycache__/test_visitor.cpython-312.pyc,,
|
||||
arpeggio/tests/regressions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
arpeggio/tests/regressions/__pycache__/__init__.cpython-312.pyc,,
|
||||
arpeggio/tests/regressions/__pycache__/test_direct_rule_call.cpython-312.pyc,,
|
||||
arpeggio/tests/regressions/__pycache__/test_memoization.cpython-312.pyc,,
|
||||
arpeggio/tests/regressions/issue_16/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
arpeggio/tests/regressions/issue_16/__pycache__/__init__.cpython-312.pyc,,
|
||||
arpeggio/tests/regressions/issue_16/__pycache__/test_issue_16.cpython-312.pyc,,
|
||||
arpeggio/tests/regressions/issue_16/test_issue_16.py,sha256=u1t8jPSOw0sr4-Yu6wLf8TTIfDzehXmvPas5dZBMtVs,2145
|
||||
arpeggio/tests/regressions/issue_20/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
arpeggio/tests/regressions/issue_20/__pycache__/__init__.cpython-312.pyc,,
|
||||
arpeggio/tests/regressions/issue_20/__pycache__/test_issue_20.cpython-312.pyc,,
|
||||
arpeggio/tests/regressions/issue_20/test_issue_20.py,sha256=s8Gn9Bcyxn-Rx63x1QK7ryV9G9howI2OHcc0MLDnSG0,828
|
||||
arpeggio/tests/regressions/issue_22/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
arpeggio/tests/regressions/issue_22/__pycache__/__init__.cpython-312.pyc,,
|
||||
arpeggio/tests/regressions/issue_22/__pycache__/test_issue_22.cpython-312.pyc,,
|
||||
arpeggio/tests/regressions/issue_22/grammar1.peg,sha256=iI7a6N9Ed3QcLdchLA_Nq18NstAGRVKPNLdmcNykars,849
|
||||
arpeggio/tests/regressions/issue_22/grammar2.peg,sha256=8SUzUN2bMp-taRZIW0j1adFa5PI-xd6Ud6aJvHvoMII,723
|
||||
arpeggio/tests/regressions/issue_22/test_issue_22.py,sha256=-NyBT8bcxq_wCmvKILLRuGvOzFdJm6gYXZwj2Jd28IY,550
|
||||
arpeggio/tests/regressions/issue_26/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
arpeggio/tests/regressions/issue_26/__pycache__/__init__.cpython-312.pyc,,
|
||||
arpeggio/tests/regressions/issue_26/__pycache__/test_issue_26.cpython-312.pyc,,
|
||||
arpeggio/tests/regressions/issue_26/test_issue_26.py,sha256=uguIaMdtzjQYqQ_Lge4OONEtOhCH4Og2x1ycBMCkuUo,407
|
||||
arpeggio/tests/regressions/issue_31/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
arpeggio/tests/regressions/issue_31/__pycache__/__init__.cpython-312.pyc,,
|
||||
arpeggio/tests/regressions/issue_31/__pycache__/test_issue_31.cpython-312.pyc,,
|
||||
arpeggio/tests/regressions/issue_31/test_issue_31.py,sha256=uRMHaMFNGVUOd6hh8xfQla1002ENwgiVJKtcZYl4Tk4,358
|
||||
arpeggio/tests/regressions/issue_32/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
arpeggio/tests/regressions/issue_32/__pycache__/__init__.cpython-312.pyc,,
|
||||
arpeggio/tests/regressions/issue_32/__pycache__/test_issue_32.cpython-312.pyc,,
|
||||
arpeggio/tests/regressions/issue_32/test_issue_32.py,sha256=U5vWwepRnfqiIHsr8sGGwyKIXf-WEhxb5NiFHNDB9Xc,8732
|
||||
arpeggio/tests/regressions/issue_43/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
arpeggio/tests/regressions/issue_43/__pycache__/__init__.cpython-312.pyc,,
|
||||
arpeggio/tests/regressions/issue_43/__pycache__/test_issue43.cpython-312.pyc,,
|
||||
arpeggio/tests/regressions/issue_43/test_issue43.py,sha256=iBkussTWmmNk8rsvEHoEbaBm_T_H1WpCa-YSoaRYTyM,562
|
||||
arpeggio/tests/regressions/issue_61/__pycache__/test_issue_61.cpython-312.pyc,,
|
||||
arpeggio/tests/regressions/issue_61/test_issue_61.py,sha256=Jflo-G3EbbP5dgJKDcu5ZljH64IUWC0CgEH9jpInLkU,1341
|
||||
arpeggio/tests/regressions/issue_73/__pycache__/test_issue_73.cpython-312.pyc,,
|
||||
arpeggio/tests/regressions/issue_73/test_issue_73.py,sha256=Kq244G_bVCroP37_t8nXMPPkKqDzHsiv8hhE-RWfD4g,966
|
||||
arpeggio/tests/regressions/test_direct_rule_call.py,sha256=Ndqhqn5ZK258MGFCEGRp_En8qnMjneHscgCF4xZ8_Rk,918
|
||||
arpeggio/tests/regressions/test_memoization.py,sha256=FUm_aw4zwlwXnvGTVFpx2FIYXmPRo0neq8VhPux_siI,1527
|
||||
arpeggio/tests/test_decorator_combine.py,sha256=lsxoWNHynJ1-osWEisosSI-2P-2JSWk7J5v22Me6Vp4,1365
|
||||
arpeggio/tests/test_default_semantic_action.py,sha256=2ReNg2oJp3mT9nzYo_7xQa0_OHhUQMrYfY-HNyWFJto,2074
|
||||
arpeggio/tests/test_eolterm.py,sha256=6fA-dPUhklwwQIQh3n9qKueyC3Ww9OR795x8RNVnE5k,1000
|
||||
arpeggio/tests/test_error_reporting.py,sha256=apTYLrI0L-DSPdqNHFV7Wtt0Ahm5egbO1TuqiEvq1I4,5938
|
||||
arpeggio/tests/test_examples.py,sha256=J524z_4smnswnwrukaxETiSXxch63s5hHI9PL4h51cc,1659
|
||||
arpeggio/tests/test_exporter.py,sha256=4-0V5tsQIRkHpQ43zwHulz2ML8EvcIwD1pzpDUXJJ44,1633
|
||||
arpeggio/tests/test_flags.py,sha256=v8oWzMtHzCGc1Pcc-XThpPokeSFkB9XYUB2nKRvgYgA,1968
|
||||
arpeggio/tests/test_parser_params.py,sha256=AuYk3R7LPMjTKtFLaC0mIhFndUNQQJsS4CeHq9wFxcU,2958
|
||||
arpeggio/tests/test_parser_resilience.py,sha256=63cVOwqnhp58Jk1_xniUZ-o3BfIICRndzB4MZ0sGar4,384
|
||||
arpeggio/tests/test_parsing_expressions.py,sha256=87aV7dqp59i3X83YalZHfRQN8lJmlmEYxKgLxy3WUPg,9336
|
||||
arpeggio/tests/test_pathologic_models.py,sha256=PxrDzY3_39b7G7fMywQBU9u2gwOHhy3st2cQWUjBbgs,1015
|
||||
arpeggio/tests/test_peg_parser.py,sha256=upISz7cMasuV58e-VKgUVNQUCGJZLEPuaaCz54JcP6I,3574
|
||||
arpeggio/tests/test_position.py,sha256=czNZNsADNpaqwv-7V-wPtsWHnzSmP_KtuGi7WJpEMmY,1522
|
||||
arpeggio/tests/test_ptnode_navigation_expressions.py,sha256=CB7eir5bXoEo3mzuHjJbrDoJTqxNykBNbGLoKB7qGEk,2342
|
||||
arpeggio/tests/test_python_parser.py,sha256=BrtRARU26K8pxjUQOdci_5GDcRIjvVo8VWtw47kW4h8,2100
|
||||
arpeggio/tests/test_reduce_tree.py,sha256=c9gd5qhqrhb2lhMT437n9OcVpecHjVTLMhOYXDYw0fU,1744
|
||||
arpeggio/tests/test_semantic_action_results.py,sha256=ZdnXLo8hJsnaNZ0NF5nOpVH5Yv1NKKhu_S1nm11ugcs,1713
|
||||
arpeggio/tests/test_separators.py,sha256=KVYVQGp9vwpjs88z9BdP0sbfN1Zk1tnG1NLIMOlP7rk,1482
|
||||
arpeggio/tests/test_sequence_params.py,sha256=NpaYme4TX586MFweO9lvLIPzEEVcxJZue52JN3ai3tA,2508
|
||||
arpeggio/tests/test_suppression.py,sha256=jt19AHlLUsy2kqLyh8HloSX03u9SV5ENmpSZO8mqPHI,1835
|
||||
arpeggio/tests/test_unicode.py,sha256=Z2oS8b_mGNh5VX0yRdCfVsdE-bHYjxa6zu3BJc8TYzw,751
|
||||
arpeggio/tests/test_visitor.py,sha256=TX7nl55N6XWFaTnr4JLeCJxbx9WkM3M8bWt993F-Rgs,1709
|
||||
arpeggio/utils.py,sha256=eb5yjVMI9ScTLcZ5IfAsiyF2dewV22KWpsfhW5j39kg,416
|
@@ -0,0 +1,6 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.40.0)
|
||||
Root-Is-Purelib: true
|
||||
Tag: py2-none-any
|
||||
Tag: py3-none-any
|
||||
|
@@ -0,0 +1 @@
|
||||
arpeggio
|
@@ -0,0 +1 @@
|
||||
pip
|
@@ -0,0 +1,20 @@
|
||||
Copyright (c) 2017-2021 Ingy döt Net
|
||||
Copyright (c) 2006-2016 Kirill Simonov
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
@@ -0,0 +1,46 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: PyYAML
|
||||
Version: 6.0.2
|
||||
Summary: YAML parser and emitter for Python
|
||||
Home-page: https://pyyaml.org/
|
||||
Download-URL: https://pypi.org/project/PyYAML/
|
||||
Author: Kirill Simonov
|
||||
Author-email: xi@resolvent.net
|
||||
License: MIT
|
||||
Project-URL: Bug Tracker, https://github.com/yaml/pyyaml/issues
|
||||
Project-URL: CI, https://github.com/yaml/pyyaml/actions
|
||||
Project-URL: Documentation, https://pyyaml.org/wiki/PyYAMLDocumentation
|
||||
Project-URL: Mailing lists, http://lists.sourceforge.net/lists/listinfo/yaml-core
|
||||
Project-URL: Source Code, https://github.com/yaml/pyyaml
|
||||
Platform: Any
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Cython
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: 3.12
|
||||
Classifier: Programming Language :: Python :: 3.13
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||
Classifier: Topic :: Text Processing :: Markup
|
||||
Requires-Python: >=3.8
|
||||
License-File: LICENSE
|
||||
|
||||
YAML is a data serialization format designed for human readability
|
||||
and interaction with scripting languages. PyYAML is a YAML parser
|
||||
and emitter for Python.
|
||||
|
||||
PyYAML features a complete YAML 1.1 parser, Unicode support, pickle
|
||||
support, capable extension API, and sensible error messages. PyYAML
|
||||
supports standard YAML tags and provides Python-specific tags that
|
||||
allow to represent an arbitrary Python object.
|
||||
|
||||
PyYAML is applicable for a broad range of tasks from complex
|
||||
configuration files to object serialization and persistence.
|
@@ -0,0 +1,43 @@
|
||||
PyYAML-6.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
PyYAML-6.0.2.dist-info/LICENSE,sha256=jTko-dxEkP1jVwfLiOsmvXZBAqcoKVQwfT5RZ6V36KQ,1101
|
||||
PyYAML-6.0.2.dist-info/METADATA,sha256=9-odFB5seu4pGPcEv7E8iyxNF51_uKnaNGjLAhz2lto,2060
|
||||
PyYAML-6.0.2.dist-info/RECORD,,
|
||||
PyYAML-6.0.2.dist-info/WHEEL,sha256=1pP4yhrbipRtdbm4Rbg3aoTjzc7pDhpHKO0CEY24CNM,152
|
||||
PyYAML-6.0.2.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11
|
||||
_yaml/__init__.py,sha256=04Ae_5osxahpJHa3XBZUAf4wi6XX32gR8D6X6p64GEA,1402
|
||||
_yaml/__pycache__/__init__.cpython-312.pyc,,
|
||||
yaml/__init__.py,sha256=N35S01HMesFTe0aRRMWkPj0Pa8IEbHpE9FK7cr5Bdtw,12311
|
||||
yaml/__pycache__/__init__.cpython-312.pyc,,
|
||||
yaml/__pycache__/composer.cpython-312.pyc,,
|
||||
yaml/__pycache__/constructor.cpython-312.pyc,,
|
||||
yaml/__pycache__/cyaml.cpython-312.pyc,,
|
||||
yaml/__pycache__/dumper.cpython-312.pyc,,
|
||||
yaml/__pycache__/emitter.cpython-312.pyc,,
|
||||
yaml/__pycache__/error.cpython-312.pyc,,
|
||||
yaml/__pycache__/events.cpython-312.pyc,,
|
||||
yaml/__pycache__/loader.cpython-312.pyc,,
|
||||
yaml/__pycache__/nodes.cpython-312.pyc,,
|
||||
yaml/__pycache__/parser.cpython-312.pyc,,
|
||||
yaml/__pycache__/reader.cpython-312.pyc,,
|
||||
yaml/__pycache__/representer.cpython-312.pyc,,
|
||||
yaml/__pycache__/resolver.cpython-312.pyc,,
|
||||
yaml/__pycache__/scanner.cpython-312.pyc,,
|
||||
yaml/__pycache__/serializer.cpython-312.pyc,,
|
||||
yaml/__pycache__/tokens.cpython-312.pyc,,
|
||||
yaml/_yaml.cpython-312-x86_64-linux-gnu.so,sha256=PJFgxnc0f5Dyde6WKmBm6fZWapawmWl7aBRruXjRA80,2481784
|
||||
yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883
|
||||
yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639
|
||||
yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851
|
||||
yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837
|
||||
yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006
|
||||
yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533
|
||||
yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445
|
||||
yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061
|
||||
yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440
|
||||
yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495
|
||||
yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794
|
||||
yaml/representer.py,sha256=IuWP-cAW9sHKEnS0gCqSa894k1Bg4cgTxaDwIcbRQ-Y,14190
|
||||
yaml/resolver.py,sha256=9L-VYfm4mWHxUD1Vg4X7rjDRK_7VZd6b92wzq7Y2IKY,9004
|
||||
yaml/scanner.py,sha256=YEM3iLZSaQwXcQRg2l2R4MdT0zGP2F9eHkKGKnHyWQY,51279
|
||||
yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165
|
||||
yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573
|
@@ -0,0 +1,6 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.44.0)
|
||||
Root-Is-Purelib: false
|
||||
Tag: cp312-cp312-manylinux_2_17_x86_64
|
||||
Tag: cp312-cp312-manylinux2014_x86_64
|
||||
|
@@ -0,0 +1,2 @@
|
||||
_yaml
|
||||
yaml
|
33  myenv/lib/python3.12/site-packages/_yaml/__init__.py  (new file)
@@ -0,0 +1,33 @@
|
||||
# This is a stub package designed to roughly emulate the _yaml
|
||||
# extension module, which previously existed as a standalone module
|
||||
# and has been moved into the `yaml` package namespace.
|
||||
# It does not perfectly mimic its old counterpart, but should get
|
||||
# close enough for anyone who's relying on it even when they shouldn't.
|
||||
import yaml
|
||||
|
||||
# in some circumstances, the yaml module we imoprted may be from a different version, so we need
|
||||
# to tread carefully when poking at it here (it may not have the attributes we expect)
|
||||
if not getattr(yaml, '__with_libyaml__', False):
|
||||
from sys import version_info
|
||||
|
||||
exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError
|
||||
raise exc("No module named '_yaml'")
|
||||
else:
|
||||
from yaml._yaml import *
|
||||
import warnings
|
||||
warnings.warn(
|
||||
'The _yaml extension module is now located at yaml._yaml'
|
||||
' and its location is subject to change. To use the'
|
||||
' LibYAML-based parser and emitter, import from `yaml`:'
|
||||
' `from yaml import CLoader as Loader, CDumper as Dumper`.',
|
||||
DeprecationWarning
|
||||
)
|
||||
del warnings
|
||||
# Don't `del yaml` here because yaml is actually an existing
|
||||
# namespace member of _yaml.
|
||||
|
||||
__name__ = '_yaml'
|
||||
# If the module is top-level (i.e. not a part of any specific package)
|
||||
# then the attribute should be set to ''.
|
||||
# https://docs.python.org/3.8/library/types.html
|
||||
__package__ = ''
|
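The deprecation warning above points at the supported import path. A minimal sketch of that pattern, assuming PyYAML is importable; it prefers the LibYAML-backed classes and falls back to the pure-Python ones when the C extension is not available:

import yaml

try:
    # LibYAML-based parser/emitter, as recommended by the warning above
    from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
    # Pure-Python fallback when the C extension is not built
    from yaml import Loader, Dumper

data = yaml.load("answer: 42", Loader=Loader)  # {'answer': 42}
text = yaml.dump(data, Dumper=Dumper)
print(data, text)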
Binary file not shown.
1924
myenv/lib/python3.12/site-packages/arpeggio/__init__.py
Normal file
1924
myenv/lib/python3.12/site-packages/arpeggio/__init__.py
Normal file
File diff suppressed because it is too large
Load Diff
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
69
myenv/lib/python3.12/site-packages/arpeggio/cleanpeg.py
Normal file
69
myenv/lib/python3.12/site-packages/arpeggio/cleanpeg.py
Normal file
@ -0,0 +1,69 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#######################################################################
|
||||
# Name: cleanpeg.py
|
||||
# Purpose: This module is a variation of the original peg.py.
|
||||
# The syntax is slightly changed to be more readable and familiar to
|
||||
# Python users. It is based on Yash's suggestion (issue 11).
|
||||
# Author: Igor R. Dejanovic <igor DOT dejanovic AT gmail DOT com>
|
||||
# Copyright: (c) 2014-2017 Igor R. Dejanovic <igor DOT dejanovic AT gmail DOT com>
|
||||
# License: MIT License
|
||||
#######################################################################
|
||||
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
from arpeggio import Optional, ZeroOrMore, Not, OneOrMore, EOF, ParserPython, \
|
||||
visit_parse_tree
|
||||
from arpeggio import RegExMatch as _
|
||||
from .peg import PEGVisitor
|
||||
from .peg import ParserPEG as ParserPEGOrig
|
||||
|
||||
__all__ = ['ParserPEG']
|
||||
|
||||
# Lexical invariants
|
||||
ASSIGNMENT = "="
|
||||
ORDERED_CHOICE = "/"
|
||||
ZERO_OR_MORE = "*"
|
||||
ONE_OR_MORE = "+"
|
||||
OPTIONAL = "?"
|
||||
UNORDERED_GROUP = "#"
|
||||
AND = "&"
|
||||
NOT = "!"
|
||||
OPEN = "("
|
||||
CLOSE = ")"
|
||||
|
||||
# PEG syntax rules
|
||||
def peggrammar(): return OneOrMore(rule), EOF
|
||||
def rule(): return rule_name, ASSIGNMENT, ordered_choice
|
||||
def ordered_choice(): return sequence, ZeroOrMore(ORDERED_CHOICE, sequence)
|
||||
def sequence(): return OneOrMore(prefix)
|
||||
def prefix(): return Optional([AND, NOT]), sufix
|
||||
def sufix(): return expression, Optional([OPTIONAL,
|
||||
ZERO_OR_MORE,
|
||||
ONE_OR_MORE,
|
||||
UNORDERED_GROUP])
|
||||
def expression(): return [regex, rule_crossref,
|
||||
(OPEN, ordered_choice, CLOSE),
|
||||
str_match], Not(ASSIGNMENT)
|
||||
|
||||
# PEG Lexical rules
|
||||
def regex(): return [("r'", _(r'''[^'\\]*(?:\\.[^'\\]*)*'''), "'"),
|
||||
('r"', _(r'''[^"\\]*(?:\\.[^"\\]*)*'''), '"')]
|
||||
def rule_name(): return _(r"[a-zA-Z_]([a-zA-Z_]|[0-9])*")
|
||||
def rule_crossref(): return rule_name
|
||||
def str_match(): return _(r'''(?s)('[^'\\]*(?:\\.[^'\\]*)*')|'''
|
||||
r'''("[^"\\]*(?:\\.[^"\\]*)*")''')
|
||||
def comment(): return "//", _(".*\n")
|
||||
|
||||
|
||||
class ParserPEG(ParserPEGOrig):
|
||||
|
||||
def _from_peg(self, language_def):
|
||||
parser = ParserPython(peggrammar, comment, reduce_tree=False,
|
||||
debug=self.debug)
|
||||
parser.root_rule_name = self.root_rule_name
|
||||
parse_tree = parser.parse(language_def)
|
||||
|
||||
return visit_parse_tree(parse_tree, PEGVisitor(self.root_rule_name,
|
||||
self.comment_rule_name,
|
||||
self.ignore_case,
|
||||
debug=self.debug))
|
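A minimal sketch (not part of this commit) of how the clean PEG notation defined above is consumed: rules are written with '=' and no trailing ';', matching the syntax rules in this module. The grammar, rule names, and input string are illustrative assumptions.

from arpeggio.cleanpeg import ParserPEG

calc_grammar = r'''
number   = r'\d+'
addition = number ("+" number)* EOF
'''

parser = ParserPEG(calc_grammar, "addition")   # "addition" is the root rule
parse_tree = parser.parse("1 + 2 + 3")
print(parse_tree)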
221
myenv/lib/python3.12/site-packages/arpeggio/export.py
Normal file
221
myenv/lib/python3.12/site-packages/arpeggio/export.py
Normal file
@ -0,0 +1,221 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#######################################################################
|
||||
# Name: export.py
|
||||
# Purpose: Export support for arpeggio
|
||||
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
|
||||
# Copyright: (c) 2009 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
|
||||
# License: MIT License
|
||||
#######################################################################
|
||||
|
||||
from __future__ import unicode_literals
|
||||
import io
|
||||
from arpeggio import Terminal
|
||||
|
||||
|
||||
class Exporter(object):
|
||||
"""
|
||||
Base class for all Exporters.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
super(Exporter, self).__init__()
|
||||
|
||||
# Export initialization
|
||||
self._render_set = set() # Used in rendering to prevent
|
||||
# rendering
|
||||
# of the same node multiple times
|
||||
|
||||
self._adapter_map = {} # Used as a registry of adapters to
|
||||
# ensure that the same adapter is
|
||||
# returned for the same adaptee object
|
||||
|
||||
def export(self, obj):
|
||||
"""
|
||||
Export of an obj to a string.
|
||||
"""
|
||||
self._outf = io.StringIO()
|
||||
self._export(obj)
|
||||
content = self._outf.getvalue()
|
||||
self._outf.close()
|
||||
return content
|
||||
|
||||
def exportFile(self, obj, file_name):
|
||||
"""
|
||||
Export of obj to a file.
|
||||
"""
|
||||
self._outf = io.open(file_name, "w", encoding="utf-8")
|
||||
self._export(obj)
|
||||
self._outf.close()
|
||||
|
||||
def _export(self, obj):
|
||||
self._outf.write(self._start())
|
||||
self._render_node(obj)
|
||||
self._outf.write(self._end())
|
||||
|
||||
def _start(self):
|
||||
"""
|
||||
Override this to specify the beginning of the graph representation.
|
||||
"""
|
||||
return ""
|
||||
|
||||
def _end(self):
|
||||
"""
|
||||
Override this to specify the end of the graph representation.
|
||||
"""
|
||||
return ""
|
||||
|
||||
|
||||
class ExportAdapter(object):
|
||||
"""
|
||||
Base adapter class for the export support.
|
||||
Adapter should be defined for every export and graph type.
|
||||
|
||||
Attributes:
|
||||
adaptee: A node to adapt.
|
||||
export: An export object used as a context of the export.
|
||||
"""
|
||||
def __init__(self, node, export):
|
||||
self.adaptee = node # adaptee is adapted graph node
|
||||
self.export = export
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Support for DOT language
|
||||
|
||||
|
||||
class DOTExportAdapter(ExportAdapter):
|
||||
"""
|
||||
Base adapter class for the DOT export support.
|
||||
"""
|
||||
@property
|
||||
def id(self):
|
||||
"""
|
||||
Graph node unique identification.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
def desc(self):
|
||||
"""
|
||||
Graph node textual description.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
def neighbours(self):
|
||||
"""
|
||||
A set of adjacent graph nodes.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class PMDOTExportAdapter(DOTExportAdapter):
|
||||
"""
|
||||
Adapter for ParsingExpression graph types (parser model).
|
||||
"""
|
||||
@property
|
||||
def id(self):
|
||||
return id(self.adaptee)
|
||||
|
||||
@property
|
||||
def desc(self):
|
||||
return self.adaptee.desc
|
||||
|
||||
@property
|
||||
def neighbours(self):
|
||||
if not hasattr(self, "_neighbours"):
|
||||
self._neighbours = []
|
||||
|
||||
# Registry of adapters used in this export
|
||||
adapter_map = self.export._adapter_map
|
||||
|
||||
for c, n in enumerate(self.adaptee.nodes):
|
||||
if isinstance(n, PMDOTExportAdapter):
|
||||
# if the neighbour node is already adapted use that adapter
|
||||
self._neighbours.append((str(c + 1), n))
|
||||
elif id(n) in adapter_map:
|
||||
# current node is adaptee -> there is registered adapter
|
||||
self._neighbours.append((str(c + 1), adapter_map[id(n)]))
|
||||
else:
|
||||
# Create new adapter
|
||||
adapter = PMDOTExportAdapter(n, self.export)
|
||||
self._neighbours.append((str(c + 1), adapter))
|
||||
adapter_map[adapter.id] = adapter
|
||||
|
||||
return self._neighbours
|
||||
|
||||
|
||||
class PTDOTExportAdapter(PMDOTExportAdapter):
|
||||
"""
|
||||
Adapter for ParseTreeNode graph types.
|
||||
"""
|
||||
@property
|
||||
def neighbours(self):
|
||||
if isinstance(self.adaptee, Terminal):
|
||||
return []
|
||||
else:
|
||||
if not hasattr(self, "_neighbours"):
|
||||
self._neighbours = []
|
||||
for c, n in enumerate(self.adaptee):
|
||||
adapter = PTDOTExportAdapter(n, self.export)
|
||||
self._neighbours.append((str(c + 1), adapter))
|
||||
return self._neighbours
|
||||
|
||||
|
||||
class DOTExporter(Exporter):
|
||||
"""
|
||||
Export to DOT language (part of GraphViz, see http://www.graphviz.org/)
|
||||
"""
|
||||
def _render_node(self, node):
|
||||
if node not in self._render_set:
|
||||
self._render_set.add(node)
|
||||
self._outf.write('\n%s [label="%s"];' %
|
||||
(node.id, self._dot_label_esc(node.desc)))
|
||||
#TODO Comment handling
|
||||
# if hasattr(node, "comments") and root.comments:
|
||||
# retval += self.node(root.comments)
|
||||
# retval += '\n%s->%s [label="comment"]' % \
|
||||
#(id(root), id(root.comments))
|
||||
for name, n in node.neighbours:
|
||||
self._outf.write('\n%s->%s [label="%s"]' %
|
||||
(node.id, n.id, name))
|
||||
self._outf.write('\n')
|
||||
self._render_node(n)
|
||||
|
||||
def _start(self):
|
||||
return "digraph arpeggio_graph {"
|
||||
|
||||
def _end(self):
|
||||
return "\n}"
|
||||
|
||||
def _dot_label_esc(self, to_esc):
|
||||
to_esc = to_esc.replace("\\", "\\\\")
|
||||
to_esc = to_esc.replace('\"', '\\"')
|
||||
to_esc = to_esc.replace('\n', '\\n')
|
||||
return to_esc
|
||||
|
||||
|
||||
class PMDOTExporter(DOTExporter):
|
||||
"""
|
||||
A convenience DOTExporter extension that uses PMDOTExportAdapter
|
||||
"""
|
||||
def export(self, obj):
|
||||
return super(PMDOTExporter, self).\
|
||||
export(PMDOTExportAdapter(obj, self))
|
||||
|
||||
def exportFile(self, obj, file_name):
|
||||
return super(PMDOTExporter, self).\
|
||||
exportFile(PMDOTExportAdapter(obj, self), file_name)
|
||||
|
||||
|
||||
class PTDOTExporter(DOTExporter):
|
||||
"""
|
||||
A convenience DOTExporter extension that uses PTDOTExportAdapter
|
||||
"""
|
||||
def export(self, obj):
|
||||
return super(PTDOTExporter, self).\
|
||||
export(PTDOTExportAdapter(obj, self))
|
||||
|
||||
def exportFile(self, obj, file_name):
|
||||
return super(PTDOTExporter, self).\
|
||||
exportFile(PTDOTExportAdapter(obj, self), file_name)
|
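A minimal sketch (not part of this commit) of the two exporters defined above: export a parser model and a parse tree to GraphViz DOT files, mirroring the debug-mode export used in peg.py further down. The grammar functions and output file names are illustrative assumptions.

from arpeggio import ParserPython, ZeroOrMore, EOF
from arpeggio import RegExMatch as _
from arpeggio.export import PMDOTExporter, PTDOTExporter

# A tiny grammar in Python notation, just to have something to export
def number():   return _(r'\d+')
def addition(): return number, ZeroOrMore("+", number), EOF

parser = ParserPython(addition)
parse_tree = parser.parse("1 + 2 + 3")

PMDOTExporter().exportFile(parser.parser_model, "addition_parser_model.dot")
PTDOTExporter().exportFile(parse_tree, "addition_parse_tree.dot")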
290
myenv/lib/python3.12/site-packages/arpeggio/peg.py
Normal file
290
myenv/lib/python3.12/site-packages/arpeggio/peg.py
Normal file
@ -0,0 +1,290 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#######################################################################
|
||||
# Name: peg.py
|
||||
# Purpose: Implementing PEG language
|
||||
# Author: Igor R. Dejanovic <igor DOT dejanovic AT gmail DOT com>
|
||||
# Copyright: (c) 2009-2017 Igor R. Dejanovic <igor DOT dejanovic AT gmail DOT com>
|
||||
# License: MIT License
|
||||
#######################################################################
|
||||
|
||||
from __future__ import print_function, unicode_literals
|
||||
import sys
|
||||
import codecs
|
||||
import copy
|
||||
import re
|
||||
from arpeggio import Sequence, OrderedChoice, Optional, ZeroOrMore, \
|
||||
OneOrMore, UnorderedGroup, EOF, EndOfFile, PTNodeVisitor, \
|
||||
SemanticError, CrossRef, GrammarError, StrMatch, And, Not, Parser, \
|
||||
ParserPython, visit_parse_tree
|
||||
from arpeggio import RegExMatch as _
|
||||
|
||||
if sys.version < '3':
|
||||
text = unicode
|
||||
else:
|
||||
text = str
|
||||
|
||||
__all__ = ['ParserPEG']
|
||||
|
||||
# Lexical invariants
|
||||
LEFT_ARROW = "<-"
|
||||
ORDERED_CHOICE = "/"
|
||||
ZERO_OR_MORE = "*"
|
||||
ONE_OR_MORE = "+"
|
||||
OPTIONAL = "?"
|
||||
UNORDERED_GROUP = "#"
|
||||
AND = "&"
|
||||
NOT = "!"
|
||||
OPEN = "("
|
||||
CLOSE = ")"
|
||||
|
||||
|
||||
# PEG syntax rules
|
||||
def peggrammar(): return OneOrMore(rule), EOF
|
||||
def rule(): return rule_name, LEFT_ARROW, ordered_choice, ";"
|
||||
def ordered_choice(): return sequence, ZeroOrMore(ORDERED_CHOICE, sequence)
|
||||
def sequence(): return OneOrMore(prefix)
|
||||
def prefix(): return Optional([AND, NOT]), sufix
|
||||
def sufix(): return expression, Optional([OPTIONAL,
|
||||
ZERO_OR_MORE,
|
||||
ONE_OR_MORE,
|
||||
UNORDERED_GROUP])
|
||||
def expression(): return [regex, rule_crossref,
|
||||
(OPEN, ordered_choice, CLOSE),
|
||||
str_match]
|
||||
|
||||
# PEG Lexical rules
|
||||
def regex(): return [("r'", _(r'''[^'\\]*(?:\\.[^'\\]*)*'''), "'"),
|
||||
('r"', _(r'''[^"\\]*(?:\\.[^"\\]*)*'''), '"')]
|
||||
def rule_name(): return _(r"[a-zA-Z_]([a-zA-Z_]|[0-9])*")
|
||||
def rule_crossref(): return rule_name
|
||||
def str_match(): return _(r'''(?s)('[^'\\]*(?:\\.[^'\\]*)*')|'''
|
||||
r'''("[^"\\]*(?:\\.[^"\\]*)*")''')
|
||||
def comment(): return "//", _(".*\n")
|
||||
|
||||
|
||||
# Escape sequences supported in PEG literal string matches
|
||||
PEG_ESCAPE_SEQUENCES_RE = re.compile(r"""
|
||||
\\ ( [\n\\'"abfnrtv] | # \\x single-character escapes
|
||||
[0-7]{1,3} | # \\ooo octal escape
|
||||
x[0-9A-Fa-f]{2} | # \\xXX hex escape
|
||||
u[0-9A-Fa-f]{4} | # \\uXXXX hex escape
|
||||
U[0-9A-Fa-f]{8} | # \\UXXXXXXXX hex escape
|
||||
N\{[- 0-9A-Z]+\} # \\N{name} Unicode name or alias
|
||||
)
|
||||
""", re.VERBOSE | re.UNICODE)
|
||||
|
||||
|
||||
class PEGVisitor(PTNodeVisitor):
|
||||
"""
|
||||
Visitor that transforms parse tree to a PEG parser for the given language.
|
||||
"""
|
||||
|
||||
def __init__(self, root_rule_name, comment_rule_name, ignore_case,
|
||||
*args, **kwargs):
|
||||
super(PEGVisitor, self).__init__(*args, **kwargs)
|
||||
self.root_rule_name = root_rule_name
|
||||
self.comment_rule_name = comment_rule_name
|
||||
self.ignore_case = ignore_case
|
||||
# Used for linking phase
|
||||
self.peg_rules = {
|
||||
"EOF": EndOfFile()
|
||||
}
|
||||
|
||||
def visit_peggrammar(self, node, children):
|
||||
|
||||
def _resolve(node):
|
||||
"""
|
||||
Resolves CrossRefs from the parser model.
|
||||
"""
|
||||
|
||||
if node in self.resolved:
|
||||
return node
|
||||
self.resolved.add(node)
|
||||
|
||||
def get_rule_by_name(rule_name):
|
||||
try:
|
||||
return self.peg_rules[rule_name]
|
||||
except KeyError:
|
||||
raise SemanticError("Rule \"{}\" does not exists."
|
||||
.format(rule_name))
|
||||
|
||||
def resolve_rule_by_name(rule_name):
|
||||
|
||||
if self.debug:
|
||||
self.dprint("Resolving crossref {}".format(rule_name))
|
||||
|
||||
resolved_rule = get_rule_by_name(rule_name)
|
||||
while type(resolved_rule) is CrossRef:
|
||||
target_rule = resolved_rule.target_rule_name
|
||||
resolved_rule = get_rule_by_name(target_rule)
|
||||
|
||||
# If resolved rule hasn't got the same name it
|
||||
# should be cloned and preserved in the peg_rules cache
|
||||
if resolved_rule.rule_name != rule_name:
|
||||
resolved_rule = copy.copy(resolved_rule)
|
||||
resolved_rule.rule_name = rule_name
|
||||
self.peg_rules[rule_name] = resolved_rule
|
||||
if self.debug:
|
||||
self.dprint("Resolving: cloned to {} = > {}"
|
||||
.format(resolved_rule.rule_name,
|
||||
resolved_rule.name))
|
||||
return resolved_rule
|
||||
|
||||
if isinstance(node, CrossRef):
|
||||
# The root rule is a cross-ref
|
||||
resolved_rule = resolve_rule_by_name(node.target_rule_name)
|
||||
return _resolve(resolved_rule)
|
||||
else:
|
||||
# Resolve children nodes
|
||||
for i, n in enumerate(node.nodes):
|
||||
node.nodes[i] = _resolve(n)
|
||||
self.resolved.add(node)
|
||||
return node
|
||||
|
||||
# Find root and comment rules
|
||||
self.resolved = set()
|
||||
comment_rule = None
|
||||
for rule in children:
|
||||
if rule.rule_name == self.root_rule_name:
|
||||
root_rule = _resolve(rule)
|
||||
if rule.rule_name == self.comment_rule_name:
|
||||
comment_rule = _resolve(rule)
|
||||
|
||||
assert root_rule, "Root rule not found!"
|
||||
return root_rule, comment_rule
|
||||
|
||||
def visit_rule(self, node, children):
|
||||
rule_name = children[0]
|
||||
if len(children) > 2:
|
||||
retval = Sequence(nodes=children[1:])
|
||||
else:
|
||||
retval = children[1]
|
||||
|
||||
retval.rule_name = rule_name
|
||||
retval.root = True
|
||||
|
||||
# Keep a map of parser rules for cross reference
|
||||
# resolving.
|
||||
self.peg_rules[rule_name] = retval
|
||||
return retval
|
||||
|
||||
def visit_sequence(self, node, children):
|
||||
if len(children) > 1:
|
||||
return Sequence(nodes=children[:])
|
||||
else:
|
||||
# If only one child rule exists reduce.
|
||||
return children[0]
|
||||
|
||||
def visit_ordered_choice(self, node, children):
|
||||
if len(children) > 1:
|
||||
retval = OrderedChoice(nodes=children[:])
|
||||
else:
|
||||
# If only one child rule exists reduce.
|
||||
retval = children[0]
|
||||
return retval
|
||||
|
||||
def visit_prefix(self, node, children):
|
||||
if len(children) == 2:
|
||||
if children[0] == NOT:
|
||||
retval = Not()
|
||||
else:
|
||||
retval = And()
|
||||
if type(children[1]) is list:
|
||||
retval.nodes = children[1]
|
||||
else:
|
||||
retval.nodes = [children[1]]
|
||||
else:
|
||||
# If there is no optional prefix reduce.
|
||||
retval = children[0]
|
||||
|
||||
return retval
|
||||
|
||||
def visit_sufix(self, node, children):
|
||||
if len(children) == 2:
|
||||
if type(children[0]) is list:
|
||||
nodes = children[0]
|
||||
else:
|
||||
nodes = [children[0]]
|
||||
if children[1] == ZERO_OR_MORE:
|
||||
retval = ZeroOrMore(nodes=nodes)
|
||||
elif children[1] == ONE_OR_MORE:
|
||||
retval = OneOrMore(nodes=nodes)
|
||||
elif children[1] == OPTIONAL:
|
||||
retval = Optional(nodes=nodes)
|
||||
else:
|
||||
retval = UnorderedGroup(nodes=nodes[0].nodes)
|
||||
else:
|
||||
retval = children[0]
|
||||
|
||||
return retval
|
||||
|
||||
def visit_rule_crossref(self, node, children):
|
||||
return CrossRef(node.value)
|
||||
|
||||
def visit_regex(self, node, children):
|
||||
match = _(children[0], ignore_case=self.ignore_case)
|
||||
match.compile()
|
||||
return match
|
||||
|
||||
def visit_str_match(self, node, children):
|
||||
match_str = node.value[1:-1]
|
||||
|
||||
# Scan the string literal, and sequentially match those escape
|
||||
# sequences which are syntactically valid Python. Attempt to convert
|
||||
# those, raising ``GrammarError`` for any semantically invalid ones.
|
||||
def decode_escape(match):
|
||||
try:
|
||||
return codecs.decode(match.group(0), "unicode_escape")
|
||||
except UnicodeDecodeError:
|
||||
raise GrammarError("Invalid escape sequence '%s'." %
|
||||
match.group(0))
|
||||
match_str = PEG_ESCAPE_SEQUENCES_RE.sub(decode_escape, match_str)
|
||||
|
||||
return StrMatch(match_str, ignore_case=self.ignore_case)
|
||||
|
||||
|
||||
class ParserPEG(Parser):
|
||||
|
||||
def __init__(self, language_def, root_rule_name, comment_rule_name=None,
|
||||
*args, **kwargs):
|
||||
"""
|
||||
Constructs parser from textual PEG definition.
|
||||
|
||||
Args:
|
||||
language_def (str): A string describing language grammar using
|
||||
PEG notation.
|
||||
root_rule_name(str): The name of the root rule.
|
||||
comment_rule_name(str): The name of the rule for comments.
|
||||
"""
|
||||
super(ParserPEG, self).__init__(*args, **kwargs)
|
||||
self.root_rule_name = root_rule_name
|
||||
self.comment_rule_name = comment_rule_name
|
||||
|
||||
# PEG Abstract Syntax Graph
|
||||
self.parser_model, self.comments_model = self._from_peg(language_def)
|
||||
# Comments should be optional and there can be more of them
|
||||
if self.comments_model:
|
||||
self.comments_model.root = True
|
||||
self.comments_model.rule_name = comment_rule_name
|
||||
|
||||
# In debug mode export parser model to dot for
|
||||
# visualization
|
||||
if self.debug:
|
||||
from arpeggio.export import PMDOTExporter
|
||||
root_rule = self.parser_model.rule_name
|
||||
PMDOTExporter().exportFile(
|
||||
self.parser_model, "{}_peg_parser_model.dot".format(root_rule))
|
||||
|
||||
def _parse(self):
|
||||
return self.parser_model.parse(self)
|
||||
|
||||
def _from_peg(self, language_def):
|
||||
parser = ParserPython(peggrammar, comment, reduce_tree=False,
|
||||
debug=self.debug)
|
||||
parser.root_rule_name = self.root_rule_name
|
||||
parse_tree = parser.parse(language_def)
|
||||
|
||||
return visit_parse_tree(parse_tree, PEGVisitor(self.root_rule_name,
|
||||
self.comment_rule_name,
|
||||
self.ignore_case,
|
||||
debug=self.debug))
|
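A minimal sketch (not part of this commit) of the standard PEG notation implemented above: unlike the cleanpeg variant, rules use '<-' and end with ';'. The grammar, rule names, and input are illustrative assumptions.

from arpeggio.peg import ParserPEG

calc_grammar = r'''
number   <- r'\d+';
addition <- number ("+" number)* EOF;
'''

parser = ParserPEG(calc_grammar, "addition")   # "addition" is the root rule
parse_tree = parser.parse("1 + 2 + 3")
print(parse_tree)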
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,98 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import absolute_import, unicode_literals, print_function
|
||||
|
||||
import pytest
|
||||
from arpeggio.cleanpeg import ParserPEG
|
||||
|
||||
input = """\
|
||||
<?php
|
||||
class Enum {
|
||||
protected $self = array();
|
||||
public function __construct( $fun ) {
|
||||
$args = func_get_args();
|
||||
for( $i=0, $n=count($args); $i<$n; $i++ )
|
||||
$this->add($args[$i]);
|
||||
}
|
||||
|
||||
public function __get( $name = null ) {
|
||||
return $this->self[$name];
|
||||
}
|
||||
|
||||
public function add( $name = null, $enum = null ) {
|
||||
if( isset($enum) )
|
||||
$this->self[$name] = $enum;
|
||||
else
|
||||
$this->self[$name] = end($this->self) + 1;
|
||||
}
|
||||
|
||||
|
||||
"""
|
||||
|
||||
grammar = r"""
|
||||
|
||||
calc = test
|
||||
|
||||
test = visibility ws* function_keyword ws* word ws* arguments* ws*
|
||||
function = visibility "function" word arguments block
|
||||
block = "{" ws* r'[^}]*' ws* "}"
|
||||
arguments = "(" ws* argument* ws* ")"
|
||||
|
||||
// $types = array("cappuccino")
|
||||
// arguments end with optional comma
|
||||
argument = ( byvalue / byreference ) ("=" value )* ","*
|
||||
byreference = "&" byvalue
|
||||
byvalue = variable
|
||||
|
||||
// value may be variable or array or string or any php type
|
||||
value = variable
|
||||
|
||||
visibility = "public" / "protected" / "private"
|
||||
function_keyword = "function"
|
||||
|
||||
variable = "$" literal r'[a-zA-Z0-9_]*'
|
||||
word = r'[a-zA-Z0-9_]+'
|
||||
literal = r'[a-zA-Z]+'
|
||||
|
||||
comment = r'("//.*")|("/\*.*\*/")'
|
||||
symbol = r'[\W]+'
|
||||
|
||||
anyword = r'[\w]*' ws*
|
||||
ws = r'[\s]+'
|
||||
|
||||
|
||||
"""
|
||||
|
||||
|
||||
def argument(parser, node, children):
|
||||
"""
|
||||
Removes parentheses if they exist and returns what was contained inside.
|
||||
"""
|
||||
print(children)
|
||||
|
||||
if len(children) == 1:
|
||||
print(children[0])
|
||||
return children[0]
|
||||
|
||||
sign = -1 if children[0] == '-' else 1
|
||||
|
||||
return sign * children[-1]
|
||||
|
||||
# Rules are mapped to semantic actions
|
||||
sem_actions = {
|
||||
"argument": argument,
|
||||
}
|
||||
|
||||
|
||||
def test_issue_16():
|
||||
|
||||
parser = ParserPEG(grammar, "calc", skipws=False)
|
||||
|
||||
input_expr = """public function __construct( )"""
|
||||
parse_tree = parser.parse(input_expr)
|
||||
|
||||
# Do semantic analysis. Do not use default actions.
|
||||
asg = parser.getASG(sem_actions=sem_actions, defaults=False)
|
||||
|
||||
assert asg
|
Binary file not shown.
Binary file not shown.
@ -0,0 +1,23 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#######################################################################
|
||||
# Name: test_optional_in_choice
|
||||
# Purpose: An Optional match always succeeds but should not stop alternative
|
||||
# probing on failed match.
|
||||
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
|
||||
# Copyright: (c) 2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
|
||||
# License: MIT License
|
||||
#######################################################################
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
# Grammar
|
||||
from arpeggio import ParserPython, Optional, EOF
|
||||
|
||||
def g(): return [Optional('first'), Optional('second'), Optional('third')], EOF
|
||||
|
||||
|
||||
def test_optional_in_choice():
|
||||
parser = ParserPython(g)
|
||||
input_str = "second"
|
||||
parse_tree = parser.parse(input_str)
|
||||
assert parse_tree is not None
|
Binary file not shown.
Binary file not shown.
@ -0,0 +1,17 @@
|
||||
number_token = r'(\d+|\d+\.\d*|\d*\.\d+)'
|
||||
identifier_token = r'[a-zA-Z_][a-zA-Z0-9_]*'
|
||||
|
||||
unqualified_identifier_expression = identifier_token
|
||||
qualified_identifier_expression = identifier_token ( "." identifier_token )*
|
||||
|
||||
unary_expression = number_token / qualified_identifier_expression
|
||||
|
||||
method_call_expression = qualified_identifier_expression "(" ( rvalue_expression ( "," rvalue_expression )* )? ")"
|
||||
collection_index_expression = qualified_identifier_expression "[" rvalue_expression ( "," rvalue_expression )* "]"
|
||||
|
||||
lvalue_expression = qualified_identifier_expression
|
||||
rvalue_expression = collection_index_expression / method_call_expression / unary_expression
|
||||
|
||||
expression = collection_index_expression / method_call_expression / unary_expression
|
||||
|
||||
belang = expression* EOF
|
@ -0,0 +1,17 @@
|
||||
number_token = r'(\d+|\d+\.\d*|\d*\.\d+)'
|
||||
identifier_token = r'[a-zA-Z_][a-zA-Z0-9_]*'
|
||||
|
||||
qualified_identifier_expression = identifier_token ( "." identifier_token )*
|
||||
|
||||
unary_expression = number_token / qualified_identifier_expression
|
||||
|
||||
lvalue_expression = qualified_identifier_expression
|
||||
rvalue_expression = expression
|
||||
|
||||
expression = compound_expression / unary_expression
|
||||
compound_expression = qualified_identifier_expression (method_call_par / index_par)+
|
||||
|
||||
method_call_par = "(" (rvalue_expression ("," rvalue_expression)* )? ")"
|
||||
index_par = "[" rvalue_expression ("," rvalue_expression)* "]"
|
||||
|
||||
belang = expression* EOF
|
@ -0,0 +1,19 @@
|
||||
import os
|
||||
from arpeggio.cleanpeg import ParserPEG
|
||||
|
||||
|
||||
def test_issue_22():
|
||||
"""
|
||||
Infinite recursion during resolving of a grammar given in a clean PEG
|
||||
notation.
|
||||
"""
|
||||
current_dir = os.path.dirname(__file__)
|
||||
|
||||
grammar1 = open(os.path.join(current_dir, 'grammar1.peg')).read()
|
||||
parser1 = ParserPEG(grammar1, 'belang')
|
||||
parser1.parse('a [0]')
|
||||
parser1.parse('a (0)')
|
||||
|
||||
grammar2 = open(os.path.join(current_dir, 'grammar2.peg')).read()
|
||||
parser2 = ParserPEG(grammar2, 'belang', debug=True)
|
||||
parser2.parse('a [0](1)[2]')
|
Some files were not shown because too many files have changed in this diff.