Version 0.2: finally, the first commit

Doc
2025-10-01 20:27:37 -04:00
commit 71e933be72
22 changed files with 1509 additions and 0 deletions

126
.dockerignore Normal file

@@ -0,0 +1,126 @@
# Secrets (Avoid copying sensitive files into the image)
.credentials
.secrets
auth.yaml
*._py_
# Editors (Editor-specific config files)
.vscode/
.idea/
# Vagrant (Vagrant-specific config)
.vagrant/
# Mac/OSX metadata files
.DS_Store
# Windows metadata files
Thumbs.db
# Python compiled bytecode and cache
__pycache__/
*.py[cod]
*$py.class
# C extensions (if not needed during runtime)
*.so
# Distribution / packaging
.Python
bin/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller (Avoid including PyInstaller build artifacts)
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports (Not needed for the Docker image)
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
cover/
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django, Flask, and other project-specific files (database, logs, etc.)
*.log
local_settings.py
db.sqlite3
db.sqlite-journal
instance/
.webassets-cache
# Scrapy
.scrapy
# Sphinx documentation build output
docs/_build/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# Virtual environments (Avoid including your virtual environments)
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Poetry and other package manager locks
poetry.lock
pdm.lock
.pdm.toml
.pdm-python
.pdm-build/
# Python environment files (pyenv, pdm, etc.)
.python-version
__pypackages__/
# Celery
celerybeat-schedule
celerybeat.pid
# Other type checkers and linters (mypy, pyre, etc.)
.mypy_cache/
.dmypy.json
dmypy.json
.pyre/
.pytype/
cython_debug/

170
.gitignore vendored Normal file

@@ -0,0 +1,170 @@
# Secrets
.credentials
.secrets
auth.yaml
*._py_
# Editors
.vscode/
.idea/
# Vagrant
.vagrant/
# Mac/OSX
.DS_Store
# Windows
Thumbs.db
# Source for the following rules: https://raw.githubusercontent.com/github/gitignore/master/Python.gitignore
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
bin/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
cover/
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
Pipfile.lock
# poetry
poetry.lock
# pdm
pdm.lock
.pdm.toml
.pdm-python
.pdm-build/
__pypackages__/
# celery beat schedule file
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
distro_client/subdomain_client.tar.gz
config/config.yaml

24
Dockerfile Normal file

@@ -0,0 +1,24 @@
# Dockerfile
FROM python:3.10-slim
# Set working directory
WORKDIR /app
# Copy requirements file and install dependencies
# COPY requirements.txt .
RUN pip install --no-cache-dir hatch
# Copy the entire project to the working directory
COPY ./src /app/src
COPY ./README.md /app/README.md
COPY ./LICENSE /app/LICENSE
COPY ./pyproject.toml /app/pyproject.toml
COPY ./scripts /app/scripts
#COPY ./tests /app/tests
COPY ./docs /app/docs
# Expose the port the app runs on (adjust based on your app's settings)
EXPOSE 5232
# Command to run the app
CMD ["hatch","run","subdomain_server","--port","5232","--config-path","/app/config/config.yaml","--debug"]

6
LICENSE Normal file

@@ -0,0 +1,6 @@
Copyright 2024 BipolarExpedition.com
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

147
Makefile Normal file

@@ -0,0 +1,147 @@
# Variables
VENV_DIR := .venv
PYTHON := $(VENV_DIR)/bin/python
SH := /bin/sh
PIP := $(VENV_DIR)/bin/pip
UPDATE_SCRIPT := scripts/update-variables.sh
SRC := src
REQ_FILE := requirements.txt
DEV_REQ_FILE := requirements-dev.txt
DOC_SRC := docs/source
DOC_BUILD := docs/build
SPHINXBUILD := $(VENV_DIR)/bin/sphinx-build
COV_HTML := coverage_html_report
PROJECT_VERSION := 0.0.1
AUTHOR_NAME := "BipolarExpedition(Doc1979)"
AUTHOR_EMAIL := "lastdoc39@gmail.com"
# This Makefile is a work in progress. It does not yet support
# most of the targets you would expect. The only target you can
# use so far is update-metadata.
# Update metadata in files
.PHONY: update-metadata
update-metadata:
$(SH) $(UPDATE_SCRIPT)
@echo "Metadata updated in source files."
# # Setup environment
# .PHONY: setup
# setup:
# rm -rf $(VENV_DIR)
# python3 -m venv $(VENV_DIR)
# $(PIP) install -r $(REQ_FILE)
# .PHONY: setup-dev
# setup-dev:
# rm -rf $(VENV_DIR)
# python3 -m venv $(VENV_DIR)
# $(PIP) install -r $(DEV_REQ_FILE)
# Default target: help
.PHONY: help
help:
@echo "Available targets:"
@echo " help - Show this help message"
# @echo " setup - Set up virtual environment and install dependencies"
# @echo " clean - Clean up the project directory"
# @echo " build - Build the project with hatch"
# @echo " pyinstaller-package - Package the project with PyInstaller for Windows"
# @echo " test - Run tests with pytest"
# @echo " coverage - Run tests and check coverage"
# @echo " format - Format code with ruff"
# @echo " run - Run the project (in venv)"
# @echo " debug - Run the project with debugging enabled"
# @echo " changelog - Generate a changelog since the last commit"
# @echo " deps - Check for outdated dependencies"
# @echo " docs-html - Generate HTML documentation"
# @echo " docs-pdf - Generate PDF documentation"
# @echo " docs - Generate all documentation formats"
@echo " update-metadata - Update metadata (version, author, etc.) in files"
# # Setup environment
# .PHONY: setup
# setup:
# rm -rf $(VENV_DIR)
# python3 -m venv $(VENV_DIR)
# $(PIP) install -r $(REQ_FILE)
# .PHONY: setup-dev
# setup-dev:
# rm -rf $(VENV_DIR)
# python3 -m venv $(VENV_DIR)
# $(PIP) install -r $(DEV_REQ_FILE)
# # Clean up build, cache, and coverage files
# .PHONY: clean
# clean:
# rm -rf $(VENV_DIR) $(DOC_BUILD) build dist *.egg-info .pytest_cache __pycache__ src/__pycache__
# # Build project using hatch
# .PHONY: build
# build:
# hatch build
# # PyInstaller for packaging the project into a binary for Windows
# .PHONY: pyinstaller-package
# pyinstaller-package:
# $(VENV_DIR)/bin/pyinstaller --onefile src/cli.py
# # Run tests
# .PHONY: test
# test:
# $(VENV_DIR)/bin/pytest
# # Run tests and generate coverage report
# .PHONY: coverage
# coverage:
# $(VENV_DIR)/bin/pytest --cov=$(SRC) --cov-report=html --cov-report=term
# # Apply formatting with ruff
# .PHONY: format
# format:
# $(VENV_DIR)/bin/ruff --fix .
# # Run the project in a virtual environment
# .PHONY: run
# run:
# $(PYTHON) src/cli.py
# # Debugging with an environment variable set
# .PHONY: debug
# debug:
# DODEBUG=1 $(PYTHON) src/cli.py
# # Generate a changelog (assuming git is used)
# .PHONY: changelog
# changelog:
# git log --pretty=format:"%h %ad | %s%d [%an]" --date=short > CHANGELOG.md
# # Check for outdated dependencies
# .PHONY: deps
# deps:
# $(PIP) list --outdated
# # Generate HTML documentation
# .PHONY: docs-html
# docs-html:
# $(SPHINXBUILD) -b html $(DOC_SRC) $(DOC_BUILD)/html
# @echo "HTML documentation generated at $(DOC_BUILD)/html"
# # Generate PDF documentation (using LaTeX)
# .PHONY: docs-pdf
# docs-pdf:
# $(SPHINXBUILD) -b latex $(DOC_SRC) $(DOC_BUILD)/latex
# make -C $(DOC_BUILD)/latex all-pdf
# @echo "PDF documentation generated at $(DOC_BUILD)/latex"
# # Generate all documentation formats
# .PHONY: docs
# docs: docs-html docs-pdf
# @echo "All documentation formats generated."
# # Update metadata in files
# .PHONY: update-metadata
# update-metadata:
# $(SH) $(UPDATE_SCRIPT)
# @echo "Metadata updated in source files."

76
README.md Normal file

@@ -0,0 +1,76 @@
[![MIT License][license-shield]][license-url]
[![Python][python-shield]][python-url]
[![Hatch][hatch-shield]][hatch-url]
[![VSCode][vscode-shield]][vscode-url]
# subdomain_server
## Description
The server component that lets authorized non-owners request an update to a subdomain's DNS record.
## Table of Contents
- [subdomain_server](#subdomain_server)
- [Description](#description)
- [Table of Contents](#table-of-contents)
- [Features](#features)
- [Installation](#installation)
- [Usage](#usage)
- [Configuration](#configuration)
- [Roadmap](#roadmap)
- [Contributing](#contributing)
- [Testing](#testing)
- [Documentation](#documentation)
- [License](#license)
- [Acknowledgments](#acknowledgments)
- [Contact Information](#contact-information)
## Features
- `/update` endpoint for requesting that a subdomain's DNS record be changed
- Per-API-token authorization with a list of allowed subdomains for each token
- `/get-ip` helper endpoint that returns the caller's IP address
- Rejection of private/reserved IP addresses and basic rate limiting of updates
## Installation
Instructions to install the project.
## Usage
Examples of how to use the project.
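Until proper usage docs are written, here is a minimal, hypothetical sketch of calling the server's `/update` endpoint (defined in `src/bpe_subdomain_server/app.py`). The host, port, API token, and subdomain below are placeholders; in practice the client installer in `distro_client/` packages `refresh_subdomain.py` for this purpose.

```python
# Hypothetical example: ask the server to point the "home" subdomain at the
# caller's public IP. Field names match the /update handler in app.py.
import requests

SERVER = "http://dyn.example.com:5232"  # placeholder host and port

payload = {
    "API_token": "your-api-token",  # placeholder token from the config file
    "subdomain": "home",            # must be in the token's allowed_subdomains
    "auto_ip": "true",              # let the server use the caller's IP
}

resp = requests.post(f"{SERVER}/update", json=payload, timeout=10)
print(resp.status_code, resp.json())
```

A successful request returns `{"status": "Success"}`; missing fields, unknown tokens, or repeated updates within 30 minutes return 400, 403, or 429 respectively.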
## Configuration ⚙
This will have information on configuration options.
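Configuration is read from a YAML file (the `AUTH_FILE` environment variable or `--config-path` option, e.g. the `config/config.yaml` mounted in `docker-compose.yaml`). The schema is not documented yet; based on the fields that `app.py` and `utils.py` read, a hypothetical sketch of such a file, generated here with PyYAML, could look like this (all values are placeholders, not an official schema):

```python
# Illustrative only: the keys mirror what AuthTable and update_namecheap
# read (dns_auth.domain/password/url/notes, api_keys.<token>...).
import yaml

config = {
    "dns_auth": {
        "domain": "example.com",                      # base domain managed by the server
        "password": "dynamic-dns-password",           # placeholder secret
        "url": "https://dyn-dns.example.net/update",  # placeholder update endpoint
        "notes": "optional free-form notes",
    },
    "api_keys": {
        "your-api-token": {
            "friendly_name": "home-router",
            "allowed_subdomains": ["home", "nas"],
        }
    },
}

with open("config/config.yaml", "w") as fh:
    yaml.safe_dump(config, fh, sort_keys=False)
```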
## Roadmap 🗺
The roadmap has not been made yet.
## Contributing
The guidelines for contributing to the project have not been defined yet.
## Testing 📝
The instructions to run tests have not been written yet.
## Documentation 📚
There is currently no link to additional documentation.
## License
This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
## Acknowledgments
- Resources Used
- [Choose an Open Source License](https://choosealicense.com)
- [GitHub Emoji Cheat Sheet](https://www.webpagefx.com/tools/emoji-cheat-sheet)
- [Img Shields](https://shields.io)
## Contact Information 📧
- BipolarExpedition(Doc1979) <lastdoc39@gmail.com>
- https://github.com/BipolarExpedition
- https://github.com/BipolarExpedition/subdomain_server
[license-shield]: https://img.shields.io/badge/License-MIT-yellow.svg
[license-url]: https://opensource.org/licenses/MIT
[python-shield]: https://img.shields.io/badge/Python-3.8%20%7C%203.9%20%7C%203.10-blue
[python-url]: https://www.python.org/
[hatch-shield]: https://img.shields.io/badge/Built%20With-Hatch-orange
[hatch-url]: https://hatch.pypa.io/
[vscode-shield]: https://img.shields.io/badge/IDE-VSCode-blue
[vscode-url]: https://code.visualstudio.com/


@@ -0,0 +1,132 @@
#!/bin/bash
echo -e "\nInstalling Subdomain Client...\n"
# Define constants
echo "Reading constants..."
DEPENDENCIES=requests
PYSCRIPT=refresh_subdomain.py
INSTALLDIR=~/.local/share/refresh_subdomain
BINDIR=~/.local/bin
ENVDIR=~/.local/share/refresh_subdomain/venv
SYSTEMDDIR=~/.config/systemd/user
FILES2COPY=refresh_subdomain.py
echo -e "Constants:\n\tDependencies: $DEPENDENCIES\n\tInstall Directory: $INSTALLDIR\n\tLaunch Script: $BINDIR/refresh_subdomain\n\tEnvironment Directory: $ENVDIR\n\tFiles to Copy: $FILES2COPY\n"
echo "Checking install files..."
# Make sure files to install are in current directory
MISSINGFILES=false
for file in $FILES2COPY; do
if [ ! -f "$file" ]; then
MISSINGFILES=true
fi
done
if [ "$MISSINGFILES" = true ]; then
echo "Error: Required Install files not found in current directory"
echo " Required files: $FILES2COPY"
echo " Run this script from the root of the project directory"
exit 1
fi
# Test for venv
echo "Checking for venv..."
DEP_VENV=false
python3 -c "import venv" && DEP_VENV=true || DEP_VENV=false
if [ "$DEP_VENV" = false ]; then
echo "Venv not installed. Installing venv..."
python3 -m pip install venv
DEP_VENV=false
python3 -c "import venv" && DEP_VENV=true || DEP_VENV=false
if [ "$DEP_VENV" = false ]; then
echo "Error: venv not installed"
exit 1
fi
fi
# Make install directory
echo "Creating install directory..."
if [ ! -d "$INSTALLDIR" ]; then
mkdir -p "$INSTALLDIR"
fi
# Copy files to install directory
echo "Copying files to install directory..."
for file in $FILES2COPY; do
cp "$file" "${INSTALLDIR}/"
done
cd "$INSTALLDIR"
# Create and activate venv if needed
echo "Creating and activating venv..."
if [ ! -d "$ENVDIR" ]; then
python3 -m venv "$ENVDIR"
fi
source "$ENVDIR/bin/activate"
# Install Deps
echo "Installing Dependencies..."
for dep in $DEPENDENCIES; do
pip install "$dep"
done
# deactivate venv
echo "Deactivating venv..."
deactivate
if [ ! -d "$BINDIR" ]; then
echo "Creating ~/.local/bin to hold launch script..."
mkdir -p "$BINDIR"
fi
# Setup launch script
echo "Creating launch script..."
cat <<EOF > "$BINDIR/refresh_subdomain"
#!/bin/bash
cd "$INSTALLDIR"
source "$ENVDIR/bin/activate"
python3 "$PYSCRIPT" \$@"
EOF
chmod +x "$BINDIR/refresh_subdomain"
# Setup systemd service
if [ ! -d "$SYSTEMDDIR" ]; then
echo "Creating systemd user service directory..."
mkdir -p "$SYSTEMDDIR"
fi
echo "Creating systemd user service..."
cat <<EOF > "$SYSTEMDDIR/subdomain.service"
[Unit]
Description=Refresh Subdomain Service
[Service]
ExecStart=$BINDIR/refresh_subdomain
EOF
echo "Creating systemd user timer..."
cat <<EOF > $SYSTEMDDIR/subdomain.timer
[Unit]
Description=Refresh Subdomain Timer
[Timer]
OnCalendar=hourly
Persistent=true
[Install]
WantedBy=timers.target
EOF
echo "Enabling systemd service..."
systemctl --user daemon-reload
systemctl --user enable subdomain.timer
echo "Installation Complete"


@@ -0,0 +1,20 @@
#!/bin/sh
# Creates a tar.gz of the subdomain client files
rm -f subdomain_client.tar.gz
rm -rf tmp
mkdir tmp
cp -a ./install_subdomain_client.sh tmp/
cp -a ../src/bpe_subdomain_server/refresh_subdomain.py tmp/
cd tmp
chmod +x install_subdomain_client.sh
tar -zcvf ../subdomain_client.tar.gz install_subdomain_client.sh refresh_subdomain.py
cd ..
rm -rf tmp
echo "Created subdomain_client.tar.gz"

22
docker-compose.yaml Normal file

@@ -0,0 +1,22 @@
services:
subdomain:
container_name: subdomain_service
build:
context: .
dockerfile: Dockerfile
networks:
backend-net:
ipv4_address: 172.18.0.52
ports:
- "5232:5232"
volumes:
- /home/doc/src/my/subdomain_server/config:/app/config:ro
environment:
- AUTH_FILE=/app/config/config.yaml
restart: always
networks:
backend-net:
external: true
# cp -ra ./src README.md LICENSE pyproject.toml scripts tests docs docker/project/

18
docs/conf.py Normal file

@@ -0,0 +1,18 @@
import os
import sys
# Set project root directory for Sphinx documentation
sys.path.insert(0, os.path.abspath('../src'))
# Project information
project = 'subdomain_server'
author = 'BipolarExpedition(Doc1979)'
release = '0.0.1'
# Configure other options as necessary, such as:
html_theme = 'alabaster' # Example, can choose others like 'sphinx_rtd_theme'
# simply add the extension to your list of extensions
extensions = ['myst_parser']
source_suffix = ['.rst', '.md']

61
pyproject.toml Normal file

@@ -0,0 +1,61 @@
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[project]
name = "bpe_subdomain_server"
version = "0.0.2"
description = "The server component for authorized, non-owners to request a subdomain to be updated."
readme = "README.md"
authors = [
{ name = "BipolarExpedition(Doc1979)" },
{ name = "Doc1979", email = "lastdoc39@gmail.com" },
]
requires-python = ">=3.8"
license = { file = "LICENSE" }
# keywords = []
classifiers = [
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
"Development Status :: 3 - Alpha",
"Programming Language :: Python :: 3",
"Operating System :: OS Independent",
]
dependencies = ["Flask", "PyYAML", "Typer", "requests"]
[project.urls]
homepage = "https://github.com/BipolarExpedition/subdomain_server"
repository = "https://github.com/BipolarExpedition/subdomain_server"
issues = "https://github.com/BipolarExpedition/subdomain_server/issues"
# documentation = "https://readthedocs.org"
# changelog = "https://github.com/me/spam/blob/master/CHANGELOG.md"
[project.scripts]
subdomain_server = "bpe_subdomain_server.cli:daemon"
# [project.gui-scripts]
# subdomain_server_gui = "bpe_subdomain_server.gui-tk:main"
#[project.optional-dependencies]
# gui = ["tkinter"]
# cli = [
# "rich",
# "click",
# ]
[tool.hatch.build]
skip-excluded-dirs = true
exclude = [".secrets", ".credentials", ".vscode", ".idea"]
[tool.hatch.envs.default]
python = "3.10"
[tool.hatch.envs.test]
python = "3.10"
dependencies = ["ruff", "myst-parser", "sphinx", "pytest"]
[tool.pytest.ini_options]
testpaths = ["tests"]
addopts = "-v -s"

41
scripts/project-cli Normal file

@@ -0,0 +1,41 @@
#!/bin/sh
# save to restore working directory later
ORIGPATH="$(pwd)"
# configure path information and working directory
SCRIPTPATH="$0"
cd "$(dirname ${SCRIPTPATH})/.."
PROJECTDIR="$(pwd)"
# source template_vars.env as defined in SOURCEFILE
SOURCEFILE="${PROJECTDIR}/template_vars.env"
if [ -f "${SOURCEFILE}" ]; then
. "$SOURCEFILE"
else
echo "Error: environment variables file not found: ${SOURCEFILE}"
exit 253
fi
# Check if python environment in use, if not source the default one
if [ -z "$VIRTUAL_ENV" ]; then
if [ -r "${PROJECTDIR}/.venv/bin/activate" ]; then
. "${PROJECTDIR}/.venv/bin/activate"
elif [ -r "${PROJECTDIR}/venv/bin/activate" ]; then
. "${PROJECTDIR}/venv/bin/activate"
elif [ -r "${PROJECTDIR}/env/bin/activate" ]; then
. "${PROJECTDIR}/env/bin/activate"
else
echo "Error: No virtual environment found in ${PROJECTDIR}/.venv, venv, or env"
exit 254
fi
fi
# Run the python script
python "${PROJECTDIR}/src/${TPL_PROJECTPREFIX}${TPL_PROJECTNAME}/cli.py" "$@"
if [ $? -ne 0 ]; then
echo "Error: Python script failed."
exit 255
fi
cd "${ORIGPATH}"


@@ -0,0 +1,49 @@
#!/bin/sh
# Updates metadata using variables in template_vars.env
# To be used for updating already defined metadata in an
# existing project, when the values in the template_vars.env
# file have changed
# Currently updates information for project version,
# project description, and project keywords
# TODO: Add updating of additional metadata
# save to restore working directory later
ORIGPATH="$(pwd)"
# configure path information and working directory
SCRIPTPATH="$0"
cd "$(dirname ${SCRIPTPATH})/.." || exit 253
PROJECTDIR="$(pwd)"
# source template_vars.env as defined in SOURCEFILE
SOURCEFILE="${PROJECTDIR}/template_vars.env"
if [ -f "${SOURCEFILE}" ]; then
# shellcheck disable=SC1090
. "$SOURCEFILE"
else
echo "Error: environment variables file not found: ${SOURCEFILE}"
exit 253
fi
KW=$(echo "${TPL_PROJECTKEYWORDS}" | python -c 'import sys;import re;print( ",".join(["\"{}\"".format(re.sub("[^a-zA-Z01-9 ]","",e.strip())) for e in (sys.stdin.read()).split(",") if e.strip()]) )')
SED_CMD=$(which sed)
$SED_CMD -E "s/version\s*=\s*\"[0-9\.\-\_a-zA-Z]+\"/version = \"${TPL_PROJECTVERSION}\"/g" "${PROJECTDIR}/pyproject.toml"
$SED_CMD -E "s/description\s*=\s*\".*?\"/description = \"${TPL_PROJECTDESCRIPTION}\"/g" "${PROJECTDIR}/pyproject.toml"
$SED_CMD -E "s/^#?\s*keywords\s*=\s*\[.*\]/keywords = [\"${KW}\"]/g" "${PROJECTDIR}/pyproject.toml"
$SED_CMD -i "s/^__version__ = .*/__version__ = \"${TPL_PROJECTVERSION}\"/" "${PROJECTDIR}/src/${TPL_PROJECTPREFIX}${TPL_PROJECTNAME}/__init__.py"
$SED_CMD -i "s/^__author__ = .*/__author__ = \"${TPL_COMPANYNAME}(${TPL_AUTHORNAME})\"/" "${PROJECTDIR}/src/${TPL_PROJECTPREFIX}${TPL_PROJECTNAME}/__init__.py"
$SED_CMD -i "s/^__email__ = .*/__email__ = \"${TPL_AUTHOREMAIL}\"/" "${PROJECTDIR}/src/${TPL_PROJECTPREFIX}${TPL_PROJECTNAME}/__init__.py"
$SED_CMD -i "s/^__projectDescription__ = .*/__projectDescription__ = \"${TPL_PROJECTDESCRIPTION}\"/" "${PROJECTDIR}/src/${TPL_PROJECTPREFIX}${TPL_PROJECTNAME}/__init__.py"
$SED_CMD -i "s/^__description__ = .*/__description__ = \"${TPL_PROJECTDESCRIPTION}\"/" "${PROJECTDIR}/src/${TPL_PROJECTPREFIX}${TPL_PROJECTNAME}/__init__.py"
# release = '0.0.1'
$SED_CMD -i "s/^\s*release\s*=.*?/release = '${TPL_PROJECTVERSION}'/" "${PROJECTDIR}/docs/conf.py"
echo "${TPL_PROJECTVERSION}" > "${PROJECTDIR}/src/${TPL_PROJECTPREFIX}${TPL_PROJECTNAME}/version.txt"
cd "${ORIGPATH}" || exit 252


@@ -0,0 +1,9 @@
__version__ = "0.0.2"
__author__ = "BipolarExpedition(Doc1979)"
__email__ = "lastdoc39@gmail.com"
__projectDescription__ = "The server component for authorized, non-owners to request a subdomain to be updated."
__description__ = "The server component for authorized, non-owners to request a subdomain to be updated."
__projectName__ = "subdomain_server"
__packageName__ = "bpe_subdomain_server"
__license__ = "MIT"
__repository__ = "https://github.com/BipolarExpedition/subdomain_server"


@@ -0,0 +1,16 @@
# Project: subdomain_server
# Package: bpe_subdomain_server
# Description: The server component for authorized, non-owners to request a subdomain to be updated.
# Author: BipolarExpedition(Doc1979)
# Email: lastdoc39@gmail.com
# License: MIT
# Repository (if exists): "https://github.com/BipolarExpedition/subdomain_server"
__version__ = "0.0.2"
__author__ = "BipolarExpedition(Doc1979)"
__email__ = "lastdoc39@gmail.com"
__projectDescription__ = "The server component for authorized, non-owners to request a subdomain to be updated."
__description__ = "The server component for authorized, non-owners to request a subdomain to be updated."
__projectName__ = "subdomain_server"
__packageName__ = "bpe_subdomain_server"
__githubProfile__ = "https://github.com/BipolarExpedition"


@@ -0,0 +1,253 @@
from flask import Flask, request, jsonify
import logging
# import json
import os
from datetime import datetime, timedelta
from bpe_subdomain_server.utils import AuthTable, update_domain, get_ip, logger
from bpe_subdomain_server.__about__ import (
__version__,
__author__,
# __email__,
# __description__,
__projectName__,
__license__,
# __repository__,
)
# last_update = None
# DEBUG = False
class Config(object):
config_path = "/etc/subdomains/auth.yaml"
auth_table = AuthTable()
BINDING = "0.0.0.0"
last_update = datetime.fromtimestamp(0)
port = 0
@classmethod
def setup_app(cls):
global logger
# global last_update
global DEBUG
# Initialize the last_update variable to 30min ago
cls.last_update = datetime.now() - timedelta(minutes=30)
_LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO")
_DEBUG = os.getenv("DEBUG", "False")
if _DEBUG.upper() in ["1", "Y", "YES", "TRUE", "T", "ON"]:
DEBUG = True
LOG_LEVEL = "DEBUG"
else:
DEBUG = False
LOG_LEVEL = _LOG_LEVEL
logger = setup_logging(LOG_LEVEL)
logger.info(
f"Starting {__projectName__} version {__version__}, {__license__} license {__author__}"
)
logger.info(f"Starting server with log level {LOG_LEVEL}")
IMMORTAL = os.getenv("IMMORTAL", False)
if isinstance(IMMORTAL, bool):
immortal = IMMORTAL
else:
IMMORTAL = str(IMMORTAL).lower().strip()
immortal = IMMORTAL.upper() in ["1", "Y", "YES", "TRUE", "T", "ON"]
config_path = os.getenv("AUTH_FILE", ";$:/\\")
BINDING = os.getenv("BINDING", "0.0.0.0")
port = 0
try:
port = int(os.getenv("PORT", "0"))
except ValueError:
pass
logger.debug(
f"Environment variables read: DEBUG[{_DEBUG}] LOG_LEVEL[{_LOG_LEVEL}] BINDING[{BINDING}] PORT[{port}] IMMORTAL[{IMMORTAL}] AUTH_FILE[{config_path}]"
)
# Check if port was not specified
if port == 0:
port = 5001
logger.warning(f"Using default port: {port}")
# The config file was not specified, set something up
if config_path in ["a_config.yaml", ";$:/\\"]:
config_path = "/etc/subdomains/auth.yaml"
logger.warning(f"Using default config path: {config_path}")
# Prefix script's directory if needed
if not os.path.exists(config_path):
if os.path.exists(os.path.join(os.path.dirname(__file__), config_path)):
config_path = os.path.join(os.path.dirname(__file__), config_path)
else:
# Log an error message
logger.error(f"Config file {config_path} does not exist.")
if not immortal:
# Raise exception file not found
raise FileNotFoundError(
f"Config file {config_path} does not exist."
)
logger.info(f"Using config file: {config_path}")
cls.config_path = config_path
cls.auth_table = AuthTable(config_path)
cls.BINDING = BINDING
cls.port = port
# load api keys early, so we can fail early
Config.auth_table.load_api_keys()
def setup_logging(
log_level: int | str = logging.INFO, logger_name="subdomain_server"
) -> logging.Logger:
"""
Set up logger interface. log_level defaults to INFO
"""
LU_log_level: dict[str, int] = {
"DEBUG": logging.DEBUG,
"INFO": logging.INFO,
"WARNING": logging.WARNING,
"ERROR": logging.ERROR,
"CRITICAL": logging.CRITICAL,
}
if isinstance(log_level, str):
i_log_level: int = LU_log_level.get(log_level.upper(), logging.INFO)
elif isinstance(log_level, int):
i_log_level = log_level
logger = logging.getLogger(logger_name)
logger.setLevel(i_log_level)
if not logger.handlers:
ch = logging.StreamHandler()
ch.setLevel(i_log_level)
formatter = logging.Formatter(
"%(asctime)s %(name)s[%(process)d]: %(levelname)s: %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
)
ch.setFormatter(formatter)
logger.addHandler(ch)
return logger
flask_app = Flask(__name__)
# TODO: add routes for lookup and health
@flask_app.route("/get-ip", methods=["GET", "POST"])
def request_get_ip():
forwarded_for = request.headers.get("X-Forwarded-For", None)
logger.debug(
f"X-Forwarded-For header: {forwarded_for}\nRemote address: {request.remote_addr}"
)
if forwarded_for:
client_ip = forwarded_for.split(",")[0]
else:
client_ip = request.remote_addr
return f"{client_ip}", 200
@flask_app.route("/update", methods=["POST"])
def update():
# Refresh API keys if the file was modified
Config.auth_table.load_api_keys()
api_keys_config = Config.auth_table.auth
logger.debug(f"API keys: {api_keys_config}")
logger.debug(f"Request json: {request.json}")
# Validate the request body
data = request.json
logger.debug(f"Received data: {data}")
if not data:
return jsonify({"error": "Invalid request body"}), 400
api_token = data.get("API_token")
subdomain = data.get("subdomain")
ip = data.get("ip")
auto_ip = data.get("auto_ip", "").lower()
if not api_token or not subdomain:
logger.warning("Update request missing API token or subdomain")
return jsonify({"error": "Missing required fields"}), 400
if not ip:
if auto_ip in {"1", "yes", "true"}:
logger.info("Update request missing ip and auto_ip is true")
forwarded_for = request.headers.get("X-Forwarded-For", None)
if forwarded_for:
ip = forwarded_for.split(",")[0]
logger.info(f"Using IP address from X-Forwarded-For header: {ip}")
else:
ip = request.remote_addr
logger.info(f"Using remote address for client IP: {ip}")
else:
logger.warning("Update request missing ip and auto_ip is false")
return jsonify({"error": "ip is required unless auto_ip is true"}), 400
# Authenticate API token and check permissions
if (
api_token not in api_keys_config["api_keys"]
or subdomain not in api_keys_config["api_keys"][api_token]["allowed_subdomains"]
):
logger.warning(
f"Unauthorized access attempt for subdomain {subdomain} with token {api_token}. IP: {ip}"
)
return jsonify({"error": "Unauthorized"}), 403
friendly_name = api_keys_config["api_keys"][api_token].get(
"friendly_name", api_token[:4] + "..." + api_token[-4:]
)
# Call the update function
step = "Running get_ip"
try:
full_domain = f"{subdomain}.{Config.auth_table.auth['dns_auth']['domain']}"
if get_ip(f"{full_domain}") == ip:
logger.info(f"IP address for {full_domain} is already up-to-date.")
return jsonify({"status": "Success"}), 200
else:
step = "Checking rate limit"
# TODO: Make the rate limiting better. Make last_update a dictionary with subdomain as key
# Check if the last update was more than 29 minutes ago
sincelast = datetime.now() - Config.last_update
if sincelast < timedelta(minutes=30):
logger.warning(
f"Too many requests to update {subdomain}. Last update was {sincelast} ago."
)
return jsonify(
{"error": f"Too many requests. Last update was {sincelast} ago."}
), 429
step = "Running update domain"
res = update_domain(subdomain, ip, Config.auth_table)
if res:
Config.last_update = datetime.now()
logger.info(
f"Updated subdomain {subdomain} with IP {ip} by {friendly_name}."
)
return jsonify({"status": "Success"}), 200
else:
return jsonify({"error": "Internal server error"}), 500
except Exception as e:
logger.error(f"Failed to update domain for {subdomain}: <{step}> {e}")
return jsonify({"error": "Internal server error"}), 500
if __name__ == "__main__":
Config.setup_app()
flask_app.run(host=Config.BINDING, port=Config.port)


@@ -0,0 +1,151 @@
#!/usr/bin/env python
# Project: subdomain_server
# Package: bpe_subdomain_server
# Description: The server component for authorized, non-owners to request a subdomain to be updated.
# Author: BipolarExpedition(Doc1979)
# Email: lastdoc39@gmail.com
# License: MIT
# Repository (if exists): "https://github.com/BipolarExpedition/subdomain_server"
"""
A CLI interface for subdomain_server.
File is bpe_subdomain_server.cli.py
"""
import typer
from bpe_subdomain_server.__about__ import (
__version__,
__author__,
__email__,
__description__,
__projectName__,
__license__,
__repository__,
)
# Import the flask app
from bpe_subdomain_server.app import Config
from bpe_subdomain_server.app import setup_logging
from bpe_subdomain_server.app import flask_app
# from bpe_subdomain_server.utils import logger # noqa: F401
import os
# import sys
# service_config.py
daemon = typer.Typer()
@daemon.command()
def start(
config_path: str = ";$:/\\",
log_level: str = ";$:/\\",
port: int = 0,
binding: str = ";$:/\\",
debug: bool = False,
immortal: bool = False,
version: bool = False,
):
# debug = True
# config_path = "auth.yaml"
# port = 5232 # Default port for the server 5232
# if debug:
# log_level = "DEBUG"
if version:
print(f"{__projectName__} version: {__version__}")
print(f"License: {__license__} Author: {__author__}({__email__})")
print(f"Description: {__description__}")
print(f"Repository: {__repository__}")
print("\n")
exit(0)
# Set up logging
# logger = setup_logging(log_level)
# logger.debug(f"Launching from command line, {__projectName__} version {__version__}")
# # The config file was not specified, set something up
# if config_path == "a_config.yaml":
# config_path = "/etc/subdomains/auth.yaml"
# logger.warning(f"Using default config path: {config_path}")
# # Prefix script's directory if needed
# if not path.exists(config_path):
# if path.exists(path.join(path.dirname(__file__), config_path)):
# config_path = path.join(path.dirname(__file__), config_path)
# else:
# # Log an error message
# logger.error(f"Config file {config_path} does not exist.")
# # Raise exception file not found
# raise FileNotFoundError(f"Config file {config_path} does not exist.")
# # Check if port was not specified
# if port == 0:
# port = 5001
# logger.warning(f"Using default port: {port}")
# # Set environment variables for flask app to read
if config_path != ";$:/\\":
os.environ["AUTH_FILE"] = config_path
if log_level != ";$:/\\":
os.environ["LOG_LEVEL"] = log_level
if port != 0:
os.environ["PORT"] = str(port)
else:
port = int(os.getenv("PORT", "5001"))
if binding != ";$:/\\":
os.environ["BINDING"] = binding
else:
binding = os.getenv("BINDING", "0.0.0.0")
if immortal:
os.environ["IMMORTAL"] = str(immortal)
if debug:
os.environ["DEBUG"] = "TrUe"
os.environ["LOG_LEVEL"] = "DEBUG"
log_level = "DEBUG"
logger = setup_logging(log_level)
debug_env_variable = os.getenv("DEBUG")
if debug_env_variable and debug_env_variable.upper() in [
"1",
"Y",
"YES",
"TRUE",
"T",
"ON",
]:
logger.debug("Debugging active.")
env_vars = []
for variable_name in [
"AUTH_FILE",
"LOG_LEVEL",
"PORT",
"IMMORTAL",
"DEBUG",
"BINDING",
]:
if os.getenv(variable_name):
env_vars.append(f"{variable_name}={os.getenv(variable_name)}")
logger.debug("Environment variables set: " + " ".join(env_vars))
# Load configuration
Config.setup_app()
# Setup the flask app
flask_app.run(host=binding, port=port)
if __name__ == "__main__":
daemon()


@@ -0,0 +1,155 @@
import yaml
# import subprocess
import logging
import requests
import socket
from os import path
import xml.etree.ElementTree as ET
import ipaddress
from typing import Any
logger = logging.getLogger("subdomain_server")
def is_public_ip(ip):
# Check if an IP address is public or not
# Straight up copy n paste from chatGPT, ngl
try:
ip_obj = ipaddress.ip_address(ip)
return not (
ip_obj.is_private
or ip_obj.is_loopback
or ip_obj.is_reserved
or ip_obj.is_link_local
or ip_obj.is_multicast
)
except ValueError:
return False # Invalid IP address
class AuthTable:
def __init__(self, auth_yaml=r"/etc/subdomains/auth.yaml") -> None:
self.auth_file: str = auth_yaml
self.last_modified: float = 0.0
self.auth: dict = {}
def load_api_keys(self) -> bool:
if not path.exists(self.auth_file):
# TODO: Consider exception
return False
# Determine if the config file has been modified since the last time it was loaded
if self.last_modified != 0.0 and self.last_modified == path.getmtime(
self.auth_file
):
return True
with open(self.auth_file, "r") as file:
self.last_modified = path.getmtime(self.auth_file)
self.auth = yaml.safe_load(file)
return True
def is_authorized(self, apikey: str, subdomain: str) -> bool:
self.load_api_keys()
if "api_keys" not in self.auth:
logger.error("API keys not found in auth file")
return False
return (
apikey in self.auth["api_keys"]
and subdomain in self.auth["api_keys"][apikey].get("allowed_subdomains", [])
)
def get_ip(addr: str) -> str:
logger.debug(f"Getting IP address for {addr}")
res = socket.gethostbyname(addr)
logger.debug(f"IP address for {addr}: {res}")
return res
# TODO: At either update_domain, update_namecheap, or both, make sure IP is not a reserved address
def update_domain(subdomain, ip, auth_table: AuthTable) -> bool:
# Verify that the IP address is not a reserved address
if not is_public_ip(ip):
logger.error(f"Ignoring request to update {subdomain} to reserved IP {ip}")
return False
auth_table.load_api_keys()
# Simulate calling an external script, e.g., "update_domain_script.sh"
# For now, just log the action
# logger.info(f"Running subprocess to update {subdomain} with IP {ip}.")
# print(f"Running subprocess to update {subdomain} with IP {ip}.")
ip_address = socket.gethostbyname(
f"{subdomain}.{auth_table.auth['dns_auth']['domain']}"
)
logger.info(f"Current IP address for {subdomain}: {ip_address}")
logger.info(f"Requested IP address for {subdomain}: {ip}")
if ip_address == ip:
logger.info(f"IP address for {subdomain} is already up-to-date.")
return True
else:
logger.info(f"Updating IP address for {subdomain} from {ip_address} to {ip}")
return update_namecheap(subdomain, ip, auth_table)
# subprocess.run(["/path/to/update_domain_script.sh", subdomain, ip], check=True)
def get_xml_text(xml_node, tag):
try:
return xml_node.find(tag).text
except AttributeError:
logger.debug(f"Could not find {tag} in XML node")
return None
def update_namecheap(subdomain, ip, auth_table: AuthTable) -> bool:
# Get authentication information from the config file
_host = subdomain
_domain = auth_table.auth["dns_auth"]["domain"]
_password = auth_table.auth["dns_auth"]["password"]
_url = auth_table.auth["dns_auth"]["url"]
_notes = auth_table.auth["dns_auth"]["notes"]
# Verify that the IP address is not a reserved address
if not is_public_ip(ip):
logger.error(f"Ignoring request to update {subdomain} to reserved IP {ip}")
return False
response = requests.get(
f"{_url}?host={_host}&domain={_domain}&password={_password}&ip={ip}"
)
if response.status_code == 200:
root = ET.fromstring(response.content.decode("utf-8"))
err_count = get_xml_text(root, "ErrCount")
if err_count == "0":
message = get_xml_text(root, "response")
logger.info(
f"Updated {subdomain} with IP {ip} on Namecheap. Message: {message}"
)
else:
try:
root_errors = root.find("errors")
if root_errors is not None:
errors_list: list[Any] = [n.text for n in root_errors.findall("*")]
error_block: str = "\n".join(errors_list)
else:
error_block = ""
except Exception:
error_block = ""
logger.error(
f"Failed to update {subdomain} with IP {ip} on Namecheap. API errors: {err_count}: {error_block}"
)
return False
else:
logger.error(
f"Failed to update {subdomain} with IP {ip} on Namecheap. Status code: {response.status_code}"
)
print(
f"Failed to update {subdomain} with IP {ip} on Namecheap. Status code: {response.status_code}"
)
return False
return True
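For reference, a short hedged sketch (not part of utils.py) of how these helpers compose; the config path, subdomain, and IP below are placeholders, and update_domain performs real DNS lookups plus an HTTP request to the configured endpoint.

from bpe_subdomain_server.utils import AuthTable, is_public_ip, update_domain

print(is_public_ip("192.168.1.10"))  # False: private address, would be rejected
print(is_public_ip("8.8.8.8"))       # True: publicly routable address

auth = AuthTable("/etc/subdomains/auth.yaml")  # placeholder config path
if auth.load_api_keys():
    # Looks up the current record, then calls the dynamic-DNS endpoint if it differs.
    ok = update_domain("home", "8.8.8.8", auth)
    print("success" if ok else "update failed")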


@@ -0,0 +1 @@
0.0.1

26
template_vars.env Normal file

@@ -0,0 +1,26 @@
# filename: template_vars.env
#
# Environment variables for project metadata.
# Variables are set by the templater at creation
#
TPL_PROJECTVERSION=0.0.2
TPL_PROJECTDESCRIPTION="The server component for authorized, non-owners to request a subdomain to be updated."
# Format must be: "keyword", "keyword", "keyword"
TPL_PROJECTKEYWORDS="dns webapp"
# Github profile is the URL to the profile page, the base of other addresses
TPL_PROJECTURL="https://github.com/BipolarExpedition/subdomain_server"
TPL_PROJECTNAME="subdomain_server"
TPL_PROJECTPREFIX="bpe_"
TPL_AUTHOREMAIL="lastdoc39@gmail.com"
TPL_AUTHORNAME=Doc1979
TPL_COMPANYNAME="BipolarExpedition"
TPL_GITHUBPROFILE="https://github.com/BipolarExpedition"
# Templater does not support calculated values such as date or time
TPL_LICENSEYEAR=2025
TPL_COPYRIGHTHOLDER=BipolarExpedition.com

6
tests/conftest.py Normal file

@@ -0,0 +1,6 @@
import pytest
import sys
import os
# Add the src directory to sys.path so tests can import the package
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../src')))
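The test suite only contains this conftest so far. A minimal first test, sketched here under the assumption that it would live in a hypothetical tests/test_app.py and use Flask's built-in test client, might look like:

from bpe_subdomain_server.app import flask_app

def test_get_ip_returns_a_body():
    client = flask_app.test_client()
    resp = client.get("/get-ip")
    assert resp.status_code == 200
    assert resp.data  # plain-text body containing the caller's IP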