Mirror of https://github.com/QIDITECH/moonraker.git (synced 2026-01-30 16:18:44 +03:00)

Commit: QIDI moonraker
scripts/build-zip-release.sh (new file, 64 lines)
@@ -0,0 +1,64 @@
#!/bin/bash
# This script builds a zipped source release for Moonraker and Klipper.

install_packages()
{
    PKGLIST="python3-dev curl"

    # Update system package info
    report_status "Running apt-get update..."
    sudo apt-get update

    # Install desired packages
    report_status "Installing packages..."
    sudo apt-get install --yes $PKGLIST
}

report_status()
{
    echo -e "\n\n###### $1"
}

verify_ready()
{
    if [ "$EUID" -eq 0 ]; then
        echo "This script must not run as root"
        exit -1
    fi

    if [ ! -d "$SRCDIR/.git" ]; then
        echo "This script must be run from a git repo"
        exit -1
    fi

    if [ ! -d "$KLIPPER_DIR/.git" ]; then
        echo "This script must be run from a git repo"
        exit -1
    fi
}

# Force script to exit if an error occurs
set -e

SRCDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )"/.. && pwd )"
OUTPUT_DIR="$SRCDIR/.dist"
KLIPPER_DIR="$HOME/klipper"
BETA=""

# Parse command line arguments
while getopts "o:k:b" arg; do
    case $arg in
        o) OUTPUT_DIR=$OPTARG;;
        k) KLIPPER_DIR=$OPTARG;;
        b) BETA="-b";;
    esac
done

[ ! -d $OUTPUT_DIR ] && mkdir $OUTPUT_DIR
verify_ready
if [ "$BETA" = "" ]; then
    releaseTag=$( git -C $KLIPPER_DIR describe --tags `git -C $KLIPPER_DIR rev-list --tags --max-count=1` )
    echo "Checking out Klipper release $releaseTag"
    git -C $KLIPPER_DIR checkout $releaseTag
fi
python3 "$SRCDIR/scripts/build_release.py" -k $KLIPPER_DIR -o $OUTPUT_DIR $BETA
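A minimal invocation sketch (the options mirror the getopts block above; ~/moonraker is an assumed clone location, and the paths shown are simply the script's own defaults made explicit):

    # Build zip releases into <repo>/.dist using the Klipper checkout at ~/klipper
    ~/moonraker/scripts/build-zip-release.sh -o ~/moonraker/.dist -k ~/klipper

    # Beta build: skips the checkout of the latest Klipper release tag
    ~/moonraker/scripts/build-zip-release.sh -b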
scripts/build_release.py (new file, 338 lines)
@@ -0,0 +1,338 @@
#!/usr/bin/python3
# Builds zip release files for Moonraker and Klipper

import os
import sys
import argparse
import shutil
import tempfile
import json
import pathlib
import time
import traceback
import subprocess
import re
from typing import Dict, Any, List, Set, Optional

MOONRAKER_PATH = os.path.abspath(
    os.path.join(os.path.dirname(__file__), ".."))
sys.path.append(os.path.join(MOONRAKER_PATH, "moonraker"))
from utils import hash_directory, retrieve_git_version  # noqa:E402

# Dirs and exts to ignore when calculating the repo hash
IGNORE_DIRS = ["out", "lib", "test", "docs", "__pycache__"]
IGNORE_EXTS = [".o", ".so", ".pyc", ".pyo", ".pyd", ".yml", ".yaml"]

# Files not to include in the source package
SKIP_FILES = [".gitignore", ".gitattributes", ".readthedocs.yaml",
              "mkdocs.yml", "__pycache__"]

RELEASE_URL = "https://api.github.com/repos/Arksine/moonraker/releases"
GIT_MAX_LOG_CNT = 100
GIT_LOG_FMT = \
    "sha:%H%x1Dauthor:%an%x1Ddate:%ct%x1Dsubject:%s%x1Dmessage:%b%x1E"
OWNER_REPOS = {
    'moonraker': "arksine/moonraker",
    'klippy': "klipper3d/klipper"
}
INSTALL_SCRIPTS = {
    'klippy': {
        'debian': "install-octopi.sh",
        'arch': "install-arch.sh",
        'centos': "install-centos.sh"
    },
    'moonraker': {
        'debian': "install-moonraker.sh"
    }
}

class CopyIgnore:
    def __init__(self, root_dir: str) -> None:
        self.root_dir = root_dir

    def __call__(self, dir_path: str, dir_items: List[str]) -> List[str]:
        ignored: List[str] = []
        for item in dir_items:
            if item in SKIP_FILES:
                ignored.append(item)
            elif dir_path == self.root_dir:
                full_path = os.path.join(dir_path, item)
                # Ignore all hidden directories in the root
                if os.path.isdir(full_path) and item[0] == ".":
                    ignored.append(item)
        return ignored

def search_install_script(data: str,
                          regex: str,
                          exclude: str
                          ) -> List[str]:
    items: Set[str] = set()
    lines: List[str] = re.findall(regex, data)
    for line in lines:
        items.update(line.strip().split())
    try:
        items.remove(exclude)
    except KeyError:
        pass
    return list(items)

def generate_dependency_info(repo_path: str, app_name: str) -> None:
    inst_scripts = INSTALL_SCRIPTS[app_name]
    package_info: Dict[str, Any] = {}
    for distro, script_name in inst_scripts.items():
        script_path = os.path.join(repo_path, "scripts", script_name)
        script = pathlib.Path(script_path)
        if not script.exists():
            continue
        data = script.read_text()
        packages: List[str] = search_install_script(
            data, r'PKGLIST="(.*)"', "${PKGLIST}")
        package_info[distro] = {'packages': sorted(packages)}
        if distro == "arch":
            aur_packages: List[str] = search_install_script(
                data, r'AURLIST="(.*)"', "${AURLIST}")
            package_info[distro]['aur_packages'] = sorted(aur_packages)
    req_file_name = os.path.join(repo_path, "scripts",
                                 f"{app_name}-requirements.txt")
    req_file = pathlib.Path(req_file_name)
    python_reqs: List[str] = []
    if req_file.exists():
        req_data = req_file.read_text()
        lines = [line.strip() for line in req_data.split('\n')
                 if line.strip()]
        for line in lines:
            comment_idx = line.find('#')
            if comment_idx == 0:
                continue
            if comment_idx > 0:
                line = line[:comment_idx].strip()
            python_reqs.append(line)
        package_info['python'] = sorted(python_reqs)
    dep_file = pathlib.Path(os.path.join(repo_path, ".dependencies"))
    dep_file.write_text(json.dumps(package_info))

def clean_repo(path: str) -> None:
    # Remove untracked and ignored files using the "git" program
    prog = ('git', '-C', path, 'clean', '-x', '-f', '-d')
    process = subprocess.Popen(prog, stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE, cwd=path)
    retcode = process.wait()
    if retcode != 0:
        print(f"Error running git clean: {path}")

def get_releases() -> List[Dict[str, Any]]:
    print("Fetching Release List...")
    prog = ('curl', '-H', "Accept: application/vnd.github.v3+json",
            RELEASE_URL)
    process = subprocess.Popen(prog, stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    response, err = process.communicate()
    retcode = process.wait()
    if retcode != 0:
        print(f"Release list request returned with code {retcode},"
              f" response:\n{err.decode()}")
        return []
    releases = json.loads(response.decode().strip())
    print(f"Found {len(releases)} releases")
    return releases

def get_last_release_info(moonraker_version: str,
                          is_beta: bool,
                          releases: List[Dict[str, Any]]
                          ) -> Dict[str, Any]:
    print("Searching for previous release assets...")
    cur_tag, commit_count = moonraker_version.split('-', 2)[:2]
    release_assets = []
    matched_tag: Optional[str] = None
    for release in releases:
        if int(commit_count) != 0:
            # This build is not being done against a fresh release,
            # return release info from a matching tag
            if release['tag_name'] == cur_tag:
                release_assets = release['assets']
                matched_tag = cur_tag
                break
        else:
            # Get the most recent non-matching tag
            if release['tag_name'] == cur_tag:
                continue
            if is_beta or not release['prerelease']:
                # Get the last tagged release. If we are building a beta,
                # that is the most recent release. Otherwise we should
                # omit pre-releases
                release_assets = release['assets']
                matched_tag = release['tag_name']
                break
    if matched_tag is None:
        print("No matching release found")
        matched_tag = "No Tag"
    else:
        print(f"Found release: {matched_tag}")

    asset_url: Optional[str] = None
    content_type: str = ""
    for asset in release_assets:
        if asset['name'] == "RELEASE_INFO":
            asset_url = asset['browser_download_url']
            content_type = asset['content_type']
            break
    if asset_url is None:
        print(f"RELEASE_INFO asset not found in release: {matched_tag}")
        return {}
    # This build is prior to a tagged release, so fetch the current tag
    print(f"Release Info Download URL: {asset_url}")
    prog = ('curl', '-L', '-H', f"Accept: {content_type}", asset_url)
    process = subprocess.Popen(prog, stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    response, err = process.communicate()
    retcode = process.wait()
    if retcode != 0:
        print("Request for release info failed")
        return {}
    resp = response.decode().strip()
    print(f"Found Info for release {matched_tag}")
    return json.loads(resp)

def get_commit_log(path: str,
                   release_info: Dict[str, Any]
                   ) -> List[Dict[str, Any]]:
    print(f"Preparing commit log for {path.split('/')[-1]}")
    start_sha = release_info.get('commit_hash', None)
    prog = ['git', '-C', path, 'log', f'--format={GIT_LOG_FMT}',
            f'--max-count={GIT_MAX_LOG_CNT}']
    if start_sha is not None:
        prog = ['git', '-C', path, 'log', f'{start_sha}..HEAD',
                f'--format={GIT_LOG_FMT}', f'--max-count={GIT_MAX_LOG_CNT}']
    process = subprocess.Popen(prog, stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE, cwd=path)
    response, err = process.communicate()
    retcode = process.wait()
    if retcode != 0:
        return []
    resp = response.decode().strip()
    commit_log: List[Dict[str, Any]] = []
    for log_entry in resp.split('\x1E'):
        log_entry = log_entry.strip()
        if not log_entry:
            continue
        log_items = [li.strip() for li in log_entry.split('\x1D')
                     if li.strip()]
        cbh = [li.split(':', 1) for li in log_items]
        commit_log.append(dict(cbh))  # type: ignore
    print(f"Found {len(commit_log)} commits")
    return commit_log

def get_commit_hash(path: str) -> str:
    prog = ('git', '-C', path, 'rev-parse', 'HEAD')
    process = subprocess.Popen(prog, stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE, cwd=path)
    commit_hash, err = process.communicate()
    retcode = process.wait()
    if retcode == 0:
        return commit_hash.strip().decode()
    raise Exception(f"Failed to get commit hash: {commit_hash.decode()}")

def generate_version_info(path: str,
                          source_dir: str,
                          channel: str,
                          release_tag: Optional[str] = None
                          ) -> Dict[str, Any]:
    print(f"Generating version info: {source_dir}")
    clean_repo(path)
    owner_repo = OWNER_REPOS[source_dir]
    curtime = int(time.time())
    date_str = time.strftime("%Y%m%d", time.gmtime(curtime))
    version = retrieve_git_version(path)
    if release_tag is None:
        release_tag = version.split('-')[0]
    source_hash = hash_directory(path, IGNORE_EXTS, IGNORE_DIRS)
    long_version = f"{version}-moonraker-{date_str}"
    release_info = {
        'git_version': version,
        'long_version': long_version,
        'commit_hash': get_commit_hash(path),
        'source_checksum': source_hash,
        'ignored_exts': IGNORE_EXTS,
        'ignored_dirs': IGNORE_DIRS,
        'build_date': curtime,
        'channel': channel,
        'owner_repo': owner_repo,
        'host_repo': OWNER_REPOS['moonraker'],
        'release_tag': release_tag
    }
    vfile = pathlib.Path(os.path.join(path, source_dir, ".version"))
    vfile.write_text(long_version)
    rfile = pathlib.Path(os.path.join(path, ".release_info"))
    rfile.write_text(json.dumps(release_info))
    generate_dependency_info(path, source_dir)
    return release_info

def create_zip(repo_path: str,
               repo_name: str,
               output_path: str
               ) -> None:
    print(f"Creating Zip Release: {repo_name}")
    zip_path = os.path.join(output_path, repo_name)
    with tempfile.TemporaryDirectory() as tmp_dir:
        dest_path = os.path.join(tmp_dir, repo_name)
        ignore_cb = CopyIgnore(repo_path)
        shutil.copytree(repo_path, dest_path, ignore=ignore_cb)
        shutil.make_archive(zip_path, "zip", root_dir=dest_path)

def main() -> None:
    # Parse start arguments
    parser = argparse.ArgumentParser(
        description="Generates zip releases for Moonraker and Klipper")
    parser.add_argument(
        "-k", "--klipper", default="~/klipper",
        metavar='<klipper_path>',
        help="Path to Klipper git repo")
    parser.add_argument(
        "-o", "--output", default=os.path.join(MOONRAKER_PATH, ".dist"),
        metavar='<output_path>', help="Path to output directory")
    parser.add_argument(
        "-b", "--beta", action='store_true',
        help="Tag release as beta")
    args = parser.parse_args()
    kpath: str = os.path.abspath(os.path.expanduser(args.klipper))
    opath: str = os.path.abspath(os.path.expanduser(args.output))
    is_beta: bool = args.beta
    channel = "beta" if is_beta else "stable"
    if not os.path.exists(kpath):
        print(f"Invalid path to Klipper: {kpath}")
        sys.exit(-1)
    if not os.path.exists(opath):
        print(f"Invalid output path: {opath}")
        sys.exit(-1)
    releases = get_releases()
    all_info: Dict[str, Dict[str, Any]] = {}
    try:
        print("Generating Moonraker Zip Distribution...")
        all_info['moonraker'] = generate_version_info(
            MOONRAKER_PATH, "moonraker", channel)
        create_zip(MOONRAKER_PATH, 'moonraker', opath)
        rtag: str = all_info['moonraker']['release_tag']
        print("Generating Klipper Zip Distribution...")
        all_info['klipper'] = generate_version_info(
            kpath, "klippy", channel, rtag)
        create_zip(kpath, 'klipper', opath)
        info_file = pathlib.Path(os.path.join(opath, "RELEASE_INFO"))
        info_file.write_text(json.dumps(all_info))
        last_rinfo = get_last_release_info(
            all_info['moonraker']['git_version'], is_beta, releases)
        commit_log = {}
        commit_log['moonraker'] = get_commit_log(
            MOONRAKER_PATH, last_rinfo.get('moonraker', {}))
        commit_log['klipper'] = get_commit_log(
            kpath, last_rinfo.get('klipper', {}))
        clog_file = pathlib.Path(os.path.join(opath, "COMMIT_LOG"))
        clog_file.write_text(json.dumps(commit_log))
    except Exception:
        print("Error Creating Zip Distribution")
        traceback.print_exc(file=sys.stdout)
        sys.exit(-1)
    print(f"Build Complete. Files are located at '{opath}'")


if __name__ == "__main__":
    main()
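The shell wrapper above hands off to this builder, which can also be run directly. A hedged example, assuming the repository is cloned at ~/moonraker and Klipper at ~/klipper:

    # Writes moonraker.zip, klipper.zip, RELEASE_INFO and COMMIT_LOG to the output dir
    python3 ~/moonraker/scripts/build_release.py -k ~/klipper -o ~/moonraker/.dist

    # Same, but tags the release channel as "beta" instead of "stable"
    python3 ~/moonraker/scripts/build_release.py -k ~/klipper -o ~/moonraker/.dist --beta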
scripts/dbtool.py (new file, 233 lines)
@@ -0,0 +1,233 @@
#! /usr/bin/python3
# Tool to backup and restore Moonraker's LMDB database
#
# Copyright (C) 2022 Eric Callahan <arksine.code@gmail.com>
#
# This file may be distributed under the terms of the GNU GPLv3 license
import argparse
import pathlib
import base64
import tempfile
import re
from typing import Any, Dict, Optional, TextIO, Tuple
import lmdb

MAX_NAMESPACES = 100
MAX_DB_SIZE = 200 * 2**20
HEADER_KEY = b"MOONRAKER_DATABASE_START"

LINE_MATCH = re.compile(r"\+(\d+),(\d+):(.+?)->(.+)")

class DBToolError(Exception):
    pass

# Use a modified CDBMake Format
# +keylen,datalen:namespace|key->data
# Key length includes the namespace, key and separator (a colon)

def open_db(db_path: str) -> lmdb.Environment:
    return lmdb.open(db_path, map_size=MAX_DB_SIZE,
                     max_dbs=MAX_NAMESPACES)

def _do_dump(namespace: bytes,
             db: object,
             backup: TextIO,
             txn: lmdb.Transaction
             ) -> None:
    expected_key_count: int = txn.stat(db)["entries"]
    # write the namespace header
    ns_key = base64.b64encode(b"namespace_" + namespace).decode()
    ns_str = f"entries={expected_key_count}"
    ns_val = base64.b64encode(ns_str.encode()).decode()
    out = f"+{len(ns_key)},{len(ns_val)}:{ns_key}->{ns_val}\n"
    backup.write(out)
    with txn.cursor(db=db) as cursor:
        count = 0
        remaining = cursor.first()
        while remaining:
            key, value = cursor.item()
            keystr = base64.b64encode(key).decode()
            valstr = base64.b64encode(value).decode()
            out = f"+{len(keystr)},{len(valstr)}:{keystr}->{valstr}\n"
            backup.write(out)
            count += 1
            remaining = cursor.next()
    if expected_key_count != count:
        print("Warning: Key count mismatch for namespace "
              f"'{namespace.decode()}': expected {expected_key_count}"
              f", wrote {count}")

def _write_header(ns_count: int, backup: TextIO):
    val_str = f"namespace_count={ns_count}"
    hkey = base64.b64encode(HEADER_KEY).decode()
    hval = base64.b64encode(val_str.encode()).decode()
    out = f"+{len(hkey)},{len(hval)}:{hkey}->{hval}\n"
    backup.write(out)

def backup(args: Dict[str, Any]):
    source_db = pathlib.Path(args["source"]).expanduser().resolve()
    if not source_db.is_dir():
        print(f"Source path not a folder: '{source_db}'")
        exit(1)
    if not source_db.joinpath("data.mdb").exists():
        print(f"No database file found in source path: '{source_db}'")
        exit(1)
    bkp_dest = pathlib.Path(args["output"]).expanduser().resolve()
    print(f"Backing up database at '{source_db}' to '{bkp_dest}'...")
    if bkp_dest.exists():
        print(f"Warning: file at '{bkp_dest}' exists, will be overwritten")
    env = open_db(str(source_db))
    expected_ns_cnt: int = env.stat()["entries"]
    with bkp_dest.open("wt") as f:
        _write_header(expected_ns_cnt, f)
        with env.begin(buffers=True) as txn:
            count = 0
            with txn.cursor() as cursor:
                remaining = cursor.first()
                while remaining:
                    namespace = bytes(cursor.key())
                    db = env.open_db(namespace, txn=txn, create=False)
                    _do_dump(namespace, db, f, txn)
                    count += 1
                    remaining = cursor.next()
    env.close()
    if expected_ns_cnt != count:
        print("Warning: namespace count mismatch: "
              f"expected: {expected_ns_cnt}, wrote: {count}")
    print("Backup complete!")

def _process_header(key: bytes, value: bytes) -> int:
    if key != HEADER_KEY:
        raise DBToolError(
            "Database Backup does not contain a valid header key, "
            f"got {key.decode()}")
    val_parts = value.split(b"=", 1)
    if val_parts[0] != b"namespace_count":
        raise DBToolError(
            "Database Backup has an invalid header value, got "
            f"{value.decode()}")
    return int(val_parts[1])

def _process_namespace(key: bytes, value: bytes) -> Tuple[bytes, int]:
    key_parts = key.split(b"_", 1)
    if key_parts[0] != b"namespace":
        raise DBToolError(
            f"Invalid Namespace Key '{key.decode()}', ID not prefixed")
    namespace = key_parts[1]
    val_parts = value.split(b"=", 1)
    if val_parts[0] != b"entries":
        raise DBToolError(
            f"Invalid Namespace value '{value.decode()}', entry "
            "count not present")
    entries = int(val_parts[1])
    return namespace, entries

def _process_line(line: str) -> Tuple[bytes, bytes]:
    match = LINE_MATCH.match(line)
    if match is None:
        # TODO: use own exception
        raise DBToolError(
            f"Invalid DB Entry match: {line}")
    parts = match.groups()
    if len(parts) != 4:
        raise DBToolError(
            f"Invalid DB Entry, does not contain all data: {line}")
    key_len, val_len, key, val = parts
    if len(key) != int(key_len):
        raise DBToolError(
            f"Invalid DB Entry, key length mismatch. "
            f"Got {len(key)}, expected {key_len}, line: {line}")
    if len(val) != int(val_len):
        raise DBToolError(
            f"Invalid DB Entry, value length mismatch. "
            f"Got {len(val)}, expected {val_len}, line: {line}")
    decoded_key = base64.b64decode(key.encode())
    decoded_val = base64.b64decode(val.encode())
    return decoded_key, decoded_val

def restore(args: Dict[str, Any]):
    dest_path = pathlib.Path(args["destination"]).expanduser().resolve()
    input_db = pathlib.Path(args["input"]).expanduser().resolve()
    if not input_db.is_file():
        print(f"No backup found at path: {input_db}")
        exit(1)
    if not dest_path.exists():
        print(f"Destination path '{dest_path}' does not exist, directory"
              " will be created")
    print(f"Restoring backup from '{input_db}' to '{dest_path}'...")
    bkp_dir: Optional[pathlib.Path] = None
    if dest_path.joinpath("data.mdb").exists():
        tmp_dir = pathlib.Path(tempfile.gettempdir())
        bkp_dir = tmp_dir.joinpath("moonrakerdb_backup")
        if not bkp_dir.is_dir():
            bkp_dir.mkdir()
        print(f"Warning: database file found in '{dest_path}', "
              "all data will be overwritten. Copying existing DB "
              f"to '{bkp_dir}'")
    env = open_db(str(dest_path))
    if bkp_dir is not None:
        env.copy(str(bkp_dir))
    expected_ns_count = -1
    namespace_count = 0
    keys_left = 0
    namespace = b""
    current_db = object()
    with env.begin(write=True) as txn:
        # clear all existing entries
        dbs = []
        with txn.cursor() as cursor:
            remaining = cursor.first()
            while remaining:
                ns = cursor.key()
                dbs.append(env.open_db(ns, txn=txn, create=False))
                remaining = cursor.next()
        for db in dbs:
            txn.drop(db)
        with input_db.open("rt") as f:
            while True:
                line = f.readline()
                if not line:
                    break
                key, val = _process_line(line)
                if expected_ns_count < 0:
                    expected_ns_count = _process_header(key, val)
                    continue
                if not keys_left:
                    namespace, keys_left = _process_namespace(key, val)
                    current_db = env.open_db(namespace, txn=txn)
                    namespace_count += 1
                    continue
                txn.put(key, val, db=current_db)
                keys_left -= 1
    if expected_ns_count != namespace_count:
        print("Warning: Namespace count mismatch, expected: "
              f"{expected_ns_count}, processed {namespace_count}")
    print("Restore Complete")


if __name__ == "__main__":
    # Parse start arguments
    parser = argparse.ArgumentParser(
        description="dbtool - tool for backup/restore of Moonraker's database")
    subparsers = parser.add_subparsers(
        title="commands", description="valid commands", required=True,
        metavar="<command>")
    bkp_parser = subparsers.add_parser("backup", help="backup a database")
    rst_parser = subparsers.add_parser("restore", help="restore a database")
    bkp_parser.add_argument(
        "source", metavar="<source path>",
        help="location of the folder containing the database to backup")
    bkp_parser.add_argument(
        "output", metavar="<output file>",
        help="location of the backup file to write to",
        default="~/moonraker_db.bkp")
    bkp_parser.set_defaults(func=backup)
    rst_parser.add_argument(
        "destination", metavar="<destination>",
        help="location of the folder where the database will be restored")
    rst_parser.add_argument(
        "input", metavar="<input file>",
        help="location of the backup file to restore from")
    rst_parser.set_defaults(func=restore)
    args = parser.parse_args()
    args.func(vars(args))
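A usage sketch for the database tool. It needs the Python interpreter from Moonraker's virtualenv so that the lmdb dependency is available, and it is safest to stop the Moonraker service first; the paths below are the tool's documented defaults:

    # Dump every namespace to a plain-text backup file
    ~/moonraker-env/bin/python scripts/dbtool.py backup ~/.moonraker_database ~/moonraker_db.bkp

    # Restore the backup; an existing database is first copied to a folder under /tmp
    ~/moonraker-env/bin/python scripts/dbtool.py restore ~/.moonraker_database ~/moonraker_db.bkp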
scripts/fetch-apikey.sh (new file, 42 lines)
@@ -0,0 +1,42 @@
#!/bin/bash
# Helper Script for fetching the API Key from a moonraker database
DATABASE_PATH="${HOME}/.moonraker_database"
MOONRAKER_ENV="${HOME}/moonraker-env"
DB_ARGS="--read=READ --db=authorized_users get _API_KEY_USER_"
API_REGEX='(?<="api_key": ")([^"]+)'

print_help()
{
    echo "Moonraker API Key Extraction Utility"
    echo
    echo "usage: fetch-apikey.sh [-h] [-e <python env path>] [-d <database path>]"
    echo
    echo "optional arguments:"
    echo "  -h                   show this message"
    echo "  -e <env path>        path to Moonraker env folder"
    echo "  -d <database path>   path to Moonraker LMDB database folder"
    exit 0
}

# Parse command line arguments
while getopts "he:d:" arg; do
    case $arg in
        h) print_help;;
        e) MOONRAKER_ENV=$OPTARG;;
        d) DATABASE_PATH=$OPTARG;;
    esac
done

PYTHON_BIN="${MOONRAKER_ENV}/bin/python"

if [ ! -f $PYTHON_BIN ]; then
    echo "No Python binary found at '${PYTHON_BIN}'"
    exit -1
fi

if [ ! -d $DATABASE_PATH ]; then
    echo "No Moonraker database found at '${DATABASE_PATH}'"
    exit -1
fi

${PYTHON_BIN} -mlmdb --env=${DATABASE_PATH} ${DB_ARGS} | grep -Po "${API_REGEX}"
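Example invocations (both flags are optional; the values shown are simply the script defaults made explicit):

    # Print the API key using the default env and database locations
    ./scripts/fetch-apikey.sh

    # Point at a non-default virtualenv and database folder
    ./scripts/fetch-apikey.sh -e ~/moonraker-env -d ~/.moonraker_database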
scripts/install-moonraker.sh (new file, 174 lines)
@@ -0,0 +1,174 @@
#!/bin/bash
# This script installs Moonraker on a Raspberry Pi machine running
# Raspbian/Raspberry Pi OS based distributions.

PYTHONDIR="${MOONRAKER_VENV:-${HOME}/moonraker-env}"
SYSTEMDDIR="/etc/systemd/system"
REBUILD_ENV="${MOONRAKER_REBUILD_ENV:-n}"
FORCE_DEFAULTS="${MOONRAKER_FORCE_DEFAULTS:-n}"
DISABLE_SYSTEMCTL="${MOONRAKER_DISABLE_SYSTEMCTL:-n}"
CONFIG_PATH="${MOONRAKER_CONFIG_PATH:-${HOME}/moonraker.conf}"
LOG_PATH="${MOONRAKER_LOG_PATH:-/tmp/moonraker.log}"

# Step 2: Clean up legacy installation
cleanup_legacy() {
    if [ -f "/etc/init.d/moonraker" ]; then
        # Stop Moonraker Service
        echo "#### Cleanup legacy install script"
        sudo systemctl stop moonraker
        sudo update-rc.d -f moonraker remove
        sudo rm -f /etc/init.d/moonraker
        sudo rm -f /etc/default/moonraker
    fi
}

# Step 3: Install packages
install_packages()
{
    PKGLIST="python3-virtualenv python3-dev libopenjp2-7 python3-libgpiod"
    PKGLIST="${PKGLIST} curl libcurl4-openssl-dev libssl-dev liblmdb-dev"
    PKGLIST="${PKGLIST} libsodium-dev zlib1g-dev libjpeg-dev packagekit"

    # Update system package info
    report_status "Running apt-get update..."
    sudo apt-get update --allow-releaseinfo-change

    # Install desired packages
    report_status "Installing packages..."
    sudo apt-get install --yes ${PKGLIST}
}

# Step 4: Create python virtual environment
create_virtualenv()
{
    report_status "Installing python virtual environment..."

    # If venv exists and user prompts a rebuild, then do so
    if [ -d ${PYTHONDIR} ] && [ $REBUILD_ENV = "y" ]; then
        report_status "Removing old virtualenv"
        rm -rf ${PYTHONDIR}
    fi

    if [ ! -d ${PYTHONDIR} ]; then
        GET_PIP="${HOME}/get-pip.py"
        virtualenv --no-pip -p /usr/bin/python3 ${PYTHONDIR}
        curl https://bootstrap.pypa.io/pip/3.6/get-pip.py -o ${GET_PIP}
        ${PYTHONDIR}/bin/python ${GET_PIP}
        rm ${GET_PIP}
    fi

    # Install/update dependencies
    ${PYTHONDIR}/bin/pip install -r ${SRCDIR}/scripts/moonraker-requirements.txt
}

# Step 5: Install startup script
install_script()
{
    # Create systemd service file
    SERVICE_FILE="${SYSTEMDDIR}/moonraker.service"
    [ -f $SERVICE_FILE ] && [ $FORCE_DEFAULTS = "n" ] && return
    report_status "Installing system start script..."
    sudo groupadd -f moonraker-admin
    sudo /bin/sh -c "cat > ${SERVICE_FILE}" << EOF
# Systemd service file for moonraker
[Unit]
Description=API Server for Klipper
Requires=network-online.target
After=network-online.target

[Install]
WantedBy=multi-user.target

[Service]
Type=simple
User=$USER
SupplementaryGroups=moonraker-admin
RemainAfterExit=yes
WorkingDirectory=${SRCDIR}
ExecStart=${LAUNCH_CMD} -c ${CONFIG_PATH} -l ${LOG_PATH}
Restart=always
RestartSec=10
EOF
    # Use systemctl to enable the moonraker systemd service script
    if [ $DISABLE_SYSTEMCTL = "n" ]; then
        sudo systemctl enable moonraker.service
        sudo systemctl daemon-reload
    fi
}

check_polkit_rules()
{
    if [ ! -x "$(command -v pkaction)" ]; then
        return
    fi
    POLKIT_VERSION="$( pkaction --version | grep -Po "(\d?\.\d+)" )"
    if [ "$POLKIT_VERSION" = "0.105" ]; then
        POLKIT_LEGACY_FILE="/etc/polkit-1/localauthority/50-local.d/10-moonraker.pkla"
        # legacy policykit rules don't give users other than root read access
        if sudo [ ! -f $POLKIT_LEGACY_FILE ]; then
            echo -e "\n*** No PolicyKit Rules detected, run 'set-policykit-rules.sh'"
            echo "*** if you wish to grant Moonraker authorization to manage"
            echo "*** system services, reboot/shutdown the system, and update"
            echo "*** packages."
        fi
    else
        POLKIT_FILE="/etc/polkit-1/rules.d/moonraker.rules"
        POLKIT_USR_FILE="/usr/share/polkit-1/rules.d/moonraker.rules"
        if [ ! -f $POLKIT_FILE ] && [ ! -f $POLKIT_USR_FILE ]; then
            echo -e "\n*** No PolicyKit Rules detected, run 'set-policykit-rules.sh'"
            echo "*** if you wish to grant Moonraker authorization to manage"
            echo "*** system services, reboot/shutdown the system, and update"
            echo "*** packages."
        fi
    fi
}

# Step 6: Start server
start_software()
{
    report_status "Launching Moonraker API Server..."
    sudo systemctl restart moonraker
}

# Helper functions
report_status()
{
    echo -e "\n\n###### $1"
}

verify_ready()
{
    if [ "$EUID" -eq 0 ]; then
        echo "This script must not run as root"
        exit -1
    fi
}

# Force script to exit if an error occurs
set -e

# Find SRCDIR from the pathname of this script
SRCDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )"/.. && pwd )"
LAUNCH_CMD="${PYTHONDIR}/bin/python ${SRCDIR}/moonraker/moonraker.py"

# Parse command line arguments
while getopts "rfzc:l:" arg; do
    case $arg in
        r) REBUILD_ENV="y";;
        f) FORCE_DEFAULTS="y";;
        z) DISABLE_SYSTEMCTL="y";;
        c) CONFIG_PATH=$OPTARG;;
        l) LOG_PATH=$OPTARG;;
    esac
done

# Run installation steps defined above
verify_ready
cleanup_legacy
install_packages
create_virtualenv
install_script
check_polkit_rules
if [ $DISABLE_SYSTEMCTL = "n" ]; then
    start_software
fi
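The command-line flags mirror the MOONRAKER_* environment overrides defined at the top of the script. A hedged example; the config and log paths are illustrative, not defaults:

    # Install with a custom config/log location, rewriting any existing service file
    ./scripts/install-moonraker.sh -f -c ~/klipper_config/moonraker.conf -l /tmp/moonraker.log

    # Equivalent via environment variables, with systemctl actions disabled
    MOONRAKER_CONFIG_PATH=~/klipper_config/moonraker.conf MOONRAKER_DISABLE_SYSTEMCTL=y ./scripts/install-moonraker.sh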
scripts/moonraker-requirements.txt (new file, 18 lines)
@@ -0,0 +1,18 @@
# Python dependencies for Moonraker
tornado==6.1.0
pyserial==3.4
pyserial-asyncio==0.6
pillow==9.0.1
lmdb==1.2.1
streaming-form-data==1.8.1
distro==1.5.0
inotify-simple==1.3.5
libnacl==1.7.2
paho-mqtt==1.5.1
pycurl==7.44.1
zeroconf==0.37.0
preprocess-cancellation==0.2.0
jinja2==3.0.3
dbus-next==0.2.3
apprise==0.9.7
ldap3==2.9.1
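These pins are normally installed into the virtualenv by install-moonraker.sh; to apply them to an existing env manually (assuming the default env location):

    ~/moonraker-env/bin/pip install -r scripts/moonraker-requirements.txt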
scripts/pk-enum-convertor.py (new file, 209 lines)
@@ -0,0 +1,209 @@
#! /usr/bin/python3
#
# The original enum-converter.py may be found at:
# https://github.com/PackageKit/PackageKit/blob/b64ee9dfa707d5dd2b93c8eebe9930a55fcde108/lib/python/enum-convertor.py
#
# Copyright (C) 2008 - 2012 PackageKit Authors
#
# Licensed under the GNU General Public License Version 2.0
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# The following modifications have been made to the original
# script:
#  * Print the time of conversion
#  * Extract and print the original license of the source
#  * Enumerations are extracted from the header file to preserve
#    order
#  * Use Python "Flag" Enumerations
#  * Extract comments and include them as docstrings
#  * Introduce a string constant validation mode. This extracts
#    strings from pk-enum.c, then compares to the calculated
#    strings from pk-enum.h.
#
# Copyright (C) 2022 Eric Callahan <arksine.code@gmail.com>
#
# Usage:
#   pk-enum-converter.py pk_enum.h > enums.py
#
# Enum String Validation Mode:
#   pk-enum-converter.py pk_enum.h pk_enum.c
#
# The pk_enum source files, pk-enum.c and pk-enum.h, can be found in the
# PackageKit GitHub repo:
# https://github.com/PackageKit/PackageKit/blob/main/lib/packagekit-glib2/pk-enum.c
# https://github.com/PackageKit/PackageKit/blob/main/lib/packagekit-glib2/pk-enum.h
#
from __future__ import print_function

from re import compile, DOTALL, MULTILINE
import time
import sys
import pathlib
import textwrap

HEADER = \
'''
# This file was autogenerated from %s by pk-enum-converter.py
# on %s UTC
#
# License for original source:
#
%s

from __future__ import annotations
import sys
from enum import Flag, auto

class PkFlag(Flag):
    @classmethod
    def from_pkstring(cls, pkstring: str):
        for name, member in cls.__members__.items():
            if member.pkstring == pkstring:
                return cls(member.value)
        # Return "unknown" flag
        return cls(1)

    @classmethod
    def from_index(cls, index: int):
        return cls(1 << index)

    @property
    def pkstring(self) -> str:
        if self.name is None:
            return " | ".join([f.pkstring for f in self])
        return self.name.lower().replace("_", "-")

    @property
    def desc(self) -> str:
        if self.name is None:
            return ", ".join([f.desc for f in self])
        description = self.name.lower().replace("_", " ")
        return description.capitalize()

    @property
    def index(self) -> int:
        return self.value.bit_length() - 1

    if sys.version_info < (3, 11):
        def __iter__(self):
            for i in range(self._value_.bit_length()):
                val = 1 << i
                if val & self._value_ == val:
                    yield self.__class__(val)
'''  # noqa: E122

FILTER_PKSTRING = \
'''    @property
    def pkstring(self) -> str:
        pks = self.name
        if pks is None:
            return " | ".join([f.pkstring for f in self])
        if pks in ["DEVELOPMENT", "NOT_DEVELOPMENT"]:
            pks = pks[:-6]
        if pks[:4] == "NOT_":
            pks = "~" + pks[4:]
        return pks.lower().replace("_", "-")
'''  # noqa: E122

ERROR_PROPS = \
'''    @property
    def pkstring(self) -> str:
        if self == Error.UPDATE_FAILED_DUE_TO_RUNNING_PROCESS:
            return "failed-due-to-running-process"
        return super().pkstring
'''  # noqa: E122

ALIASES = {
    "Error.OOM": "OUT_OF_MEMORY"
}

header_enum = compile(r"/\*\*\n(.+?)@PK_[A-Z_]+_ENUM_LAST:\s+\*\s+(.+?)"
                      r"\s+\*\*/\s+typedef enum {(.+?)} Pk(.+?)Enum",
                      DOTALL | MULTILINE)
header_value = compile(r"(PK_[A-Z_]+_ENUM)_([A-Z0-9_]+)")
header_desc = compile(r"@PK_[A-Z_]+_ENUM_([A-Z_]+):(.*)")
license = compile(r"(Copyright.+?)\*/", DOTALL | MULTILINE)
enum_h_name = sys.argv[1]
header = pathlib.Path(enum_h_name).read_text()

# Get License
lic_match = license.search(header)
assert lic_match is not None
lic_parts = lic_match.group(1).split("\n")
lic = "\n".join([("# " + p.lstrip("* ")).rstrip(" ") for p in lic_parts])


if len(sys.argv) > 2:
    # Validation Mode, extract strings from the source file, compare to
    # those calculated from the enums in the header file
    enum_to_string = {}
    enum = compile(r"static const PkEnumMatch enum_([^\]]+)\[\] = {(.*?)};",
                   DOTALL | MULTILINE)
    value = compile(r"(PK_[A-Z_]+_ENUM_[A-Z0-9_]+),\s+\"([^\"]+)\"")
    enum_c_name = sys.argv[2]
    inp = pathlib.Path(enum_c_name).read_text()
    for (name, data) in enum.findall(inp):
        for (enum_name, string) in value.findall(data):
            enum_to_string[enum_name] = string
    for (desc_data, comments, data, name) in header_enum.findall(header):
        for (prefix, short_name) in header_value.findall(data):
            if short_name == "LAST":
                continue
            # Validation Mode
            enum_name = f"{prefix}_{short_name}"
            string = enum_to_string[enum_name]
            calc_string = short_name.lower().replace("_", "-")
            if calc_string[:4] == "not-" and name == "Filter":
                calc_string = "~" + calc_string[4:]
            if calc_string != string:
                print(
                    f"Calculated String Mismatch: {name}.{short_name}\n"
                    f"Calculated: {calc_string}\n"
                    f"Extracted: {string}\n")
    exit(0)

print(HEADER % (enum_h_name, time.asctime(time.gmtime()), lic))
# Use the header file for correct enum ordering
for (desc_data, comments, data, name) in header_enum.findall(header):

    print(f"\nclass {name}(PkFlag):")
    # Print Docstring
    print('    """')
    comments = [(" " * 4 + c.lstrip("* ")).rstrip(" ")
                for c in comments.splitlines()]
    for comment in comments:
        comment = comment.expandtabs(4)
        if len(comment) > 79:
            comment = "\n".join(textwrap.wrap(
                comment, 79, subsequent_indent="    ",
                tabsize=4))
        print(comment)
    print("")
    for (item, desc) in header_desc.findall(desc_data):
        line = f"    * {name}.{item}: {desc}".rstrip()
        if len(line) > 79:
            print(f"    * {name}.{item}:")
            print(f"      {desc}")
        else:
            print(line)
    print('    """')
    if name == "Filter":
        print(FILTER_PKSTRING)
    elif name == "Error":
        print(ERROR_PROPS)
    aliases = []
    for (prefix, short_name) in header_value.findall(data):
        if short_name == "LAST":
            continue
        long_name = f"{name}.{short_name}"
        if long_name in ALIASES:
            alias = ALIASES[long_name]
            aliases.append((short_name, alias))
            short_name = alias
        # Print Enums
        print(f"    {short_name} = auto()")
    for name, alias in aliases:
        print(f"    {name} = {alias}")
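Usage follows the header comments above; the output filename is only an example, and pk-enum.h / pk-enum.c come from the PackageKit repository linked in the header:

    # Generate Python Flag enumerations from the PackageKit header
    ./scripts/pk-enum-convertor.py pk-enum.h > pk_enums.py

    # Validation mode: compare calculated strings against those in pk-enum.c
    ./scripts/pk-enum-convertor.py pk-enum.h pk-enum.c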
scripts/set-policykit-rules.sh (new file, 151 lines)
@@ -0,0 +1,151 @@
#!/bin/bash
# This script installs Moonraker's PolicyKit Rules used to grant access

POLKIT_LEGACY_DIR="/etc/polkit-1/localauthority/50-local.d"
POLKIT_DIR="/etc/polkit-1/rules.d"
POLKIT_USR_DIR="/usr/share/polkit-1/rules.d"
MOONRAKER_UNIT="/etc/systemd/system/moonraker.service"
MOONRAKER_GID="-1"

check_moonraker_service()
{

    # Force Add the moonraker-admin group
    sudo groupadd -f moonraker-admin
    [ ! -f $MOONRAKER_UNIT ] && return
    # Make sure the unit file contains supplementary group
    HAS_SUPP="$( grep -cm1 "SupplementaryGroups=moonraker-admin" $MOONRAKER_UNIT || true )"
    [ "$HAS_SUPP" -eq 1 ] && return
    report_status "Adding moonraker-admin supplementary group to $MOONRAKER_UNIT"
    sudo sed -i "/^Type=simple$/a SupplementaryGroups=moonraker-admin" $MOONRAKER_UNIT
    sudo systemctl daemon-reload
}

add_polkit_legacy_rules()
{
    RULE_FILE="${POLKIT_LEGACY_DIR}/10-moonraker.pkla"
    report_status "Installing Moonraker PolicyKit Rules (Legacy) to ${RULE_FILE}..."
    ACTIONS="org.freedesktop.systemd1.manage-units"
    ACTIONS="${ACTIONS};org.freedesktop.login1.power-off"
    ACTIONS="${ACTIONS};org.freedesktop.login1.power-off-multiple-sessions"
    ACTIONS="${ACTIONS};org.freedesktop.login1.reboot"
    ACTIONS="${ACTIONS};org.freedesktop.login1.reboot-multiple-sessions"
    ACTIONS="${ACTIONS};org.freedesktop.packagekit.*"
    sudo /bin/sh -c "cat > ${RULE_FILE}" << EOF
[moonraker permissions]
Identity=unix-user:$USER
Action=$ACTIONS
ResultAny=yes
EOF
}

add_polkit_rules()
{
    if [ ! -x "$(command -v pkaction)" ]; then
        echo "PolicyKit not installed"
        exit 1
    fi
    POLKIT_VERSION="$( pkaction --version | grep -Po "(\d+\.?\d*)" )"
    report_status "PolicyKit Version ${POLKIT_VERSION} Detected"
    if [ "$POLKIT_VERSION" = "0.105" ]; then
        # install legacy pkla file
        add_polkit_legacy_rules
        return
    fi
    RULE_FILE=""
    if [ -d $POLKIT_USR_DIR ]; then
        RULE_FILE="${POLKIT_USR_DIR}/moonraker.rules"
    elif [ -d $POLKIT_DIR ]; then
        RULE_FILE="${POLKIT_DIR}/moonraker.rules"
    else
        echo "PolicyKit rules folder not detected"
        exit 1
    fi
    report_status "Installing PolicyKit Rules to ${RULE_FILE}..."
    MOONRAKER_GID=$( getent group moonraker-admin | awk -F: '{printf "%d", $3}' )
    sudo /bin/sh -c "cat > ${RULE_FILE}" << EOF
// Allow Moonraker User to manage systemd units, reboot and shutdown
// the system
polkit.addRule(function(action, subject) {
    if ((action.id == "org.freedesktop.systemd1.manage-units" ||
         action.id == "org.freedesktop.login1.power-off" ||
         action.id == "org.freedesktop.login1.power-off-multiple-sessions" ||
         action.id == "org.freedesktop.login1.reboot" ||
         action.id == "org.freedesktop.login1.reboot-multiple-sessions" ||
         action.id.startsWith("org.freedesktop.packagekit.")) &&
        subject.user == "$USER") {
        // Only allow processes with the "moonraker-admin" supplementary group
        // access
        var regex = "^Groups:.+?\\\s$MOONRAKER_GID[\\\s\\\0]";
        var cmdpath = "/proc/" + subject.pid.toString() + "/status";
        try {
            polkit.spawn(["grep", "-Po", regex, cmdpath]);
            return polkit.Result.YES;
        } catch (error) {
            return polkit.Result.NOT_HANDLED;
        }
    }
});
EOF
}

clear_polkit_rules()
{
    report_status "Removing all Moonraker PolicyKit rules"
    sudo rm -f "${POLKIT_LEGACY_DIR}/10-moonraker.pkla"
    sudo rm -f "${POLKIT_USR_DIR}/moonraker.rules"
    sudo rm -f "${POLKIT_DIR}/moonraker.rules"
}

# Helper functions
report_status()
{
    echo -e "\n\n###### $1"
}

verify_ready()
{
    if [ "$EUID" -eq 0 ]; then
        echo "This script must not run as root"
        exit -1
    fi
}

CLEAR="n"
ROOT="n"
DISABLE_SYSTEMCTL="n"

# Parse command line arguments
while :; do
    case $1 in
        -c|--clear)
            CLEAR="y"
            ;;
        -r|--root)
            ROOT="y"
            ;;
        -z|--disable-systemctl)
            DISABLE_SYSTEMCTL="y"
            ;;
        *)
            break
    esac

    shift
done

if [ "$ROOT" = "n" ]; then
    verify_ready
fi

if [ "$CLEAR" = "y" ]; then
    clear_polkit_rules
else
    set -e
    check_moonraker_service
    add_polkit_rules
    if [ $DISABLE_SYSTEMCTL = "n" ]; then
        report_status "Restarting Moonraker..."
        sudo systemctl restart moonraker
    fi
fi
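Typical invocations (install-moonraker.sh prints a reminder to run this script when no rules are detected):

    # Install PolicyKit rules for the current user and restart Moonraker
    ./scripts/set-policykit-rules.sh

    # Install without restarting the service
    ./scripts/set-policykit-rules.sh --disable-systemctl

    # Remove any previously installed Moonraker rules
    ./scripts/set-policykit-rules.sh --clear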
scripts/sudo_fix.sh (new file, 189 lines)
@@ -0,0 +1,189 @@
#!/bin/bash

# moonraker-sudo (mnrkrsudo)
# Provides a dedicated group intended to elevate user privileges so that
# Moonraker can perform sudo actions, e.g. in CustomPiOS images with the
# "password-for-sudo" module.
#
# Partially uses functions from Arksine
#
# Copyright (C) 2020 Stephan Wendel <me@stephanwe.de>
#
# This file may be distributed under the terms of the GNU GPLv3 license

### Exit on Errors
set -e

### Configuration

SUDOERS_DIR='/etc/sudoers.d'
SUDOERS_FILE='020-sudo-for-moonraker'
NEW_GROUP='mnrkrsudo'


### Functions

verify_ready()
{
    if [ "$EUID" -eq 0 ]; then
        echo "This script must not run as root"
        exit -1
    fi
}

create_sudoers_file()
{

    SCRIPT_TEMP_PATH=/tmp

    report_status "Creating ${SUDOERS_FILE} ..."
    sudo rm -f $SCRIPT_TEMP_PATH/$SUDOERS_FILE
    sudo sed "s/GROUPNAME/$NEW_GROUP/g" > $SCRIPT_TEMP_PATH/$SUDOERS_FILE << '#EOF'

### Elevate moonraker API rights
### Do NOT allow Command Parts only Full Commands
### for example
###
### /sbin/systemctl "reboot", /sbin/apt "update", .....

Defaults!/usr/bin/apt-get env_keep +="DEBIAN_FRONTEND"

Cmnd_Alias REBOOT = /sbin/shutdown -r now, /bin/systemctl "reboot"
Cmnd_Alias SHUTDOWN = /sbin/shutdown now, /sbin/shutdown -h now, /bin/systemctl "poweroff"
Cmnd_Alias APT = /usr/bin/apt-get
Cmnd_Alias SYSTEMCTL = /bin/systemctl



%GROUPNAME ALL=(ALL) NOPASSWD: REBOOT, SHUTDOWN, APT, SYSTEMCTL

#EOF

    report_status "\e[1;32m...done\e[0m"
}

update_env()
{
    report_status "Export System Variable: DEBIAN_FRONTEND=noninteractive"
    sudo /bin/sh -c 'echo "DEBIAN_FRONTEND=noninteractive" >> /etc/environment'
}

verify_syntax()
{
    if [ -n "$(whereis -b visudo | awk '{print $2}')" ]; then

        report_status "\e[1;33mVerifying Syntax of ${SUDOERS_FILE}\e[0m\n"

        if [ $(LANG=C sudo visudo -cf $SCRIPT_TEMP_PATH/$SUDOERS_FILE | grep -c "OK" ) -eq 1 ];
        then
            VERIFY_STATUS=0
            report_status "\e[1;32m$(LANG=C sudo visudo -cf $SCRIPT_TEMP_PATH/$SUDOERS_FILE)\e[0m"
        else
            report_status "\e[1;31mSyntax Error:\e[0m Check File: $SCRIPT_TEMP_PATH/$SUDOERS_FILE"
            exit 1
        fi
    else
        VERIFY_STATUS=0
        report_status "\e[1;31mCommand 'visudo' not found. Skip verifying sudoers file.\e[0m"
    fi
}

install_sudoers_file()
{
    verify_syntax
    if [ $VERIFY_STATUS -eq 0 ];
    then
        report_status "Copying $SCRIPT_TEMP_PATH/$SUDOERS_FILE to $SUDOERS_DIR/$SUDOERS_FILE"
        sudo chmod 0440 $SCRIPT_TEMP_PATH/$SUDOERS_FILE
        sudo cp --preserve=mode $SCRIPT_TEMP_PATH/$SUDOERS_FILE $SUDOERS_DIR/$SUDOERS_FILE
    else
        exit 1
    fi
}

check_update_sudoers_file()
{
    if [ -e "$SUDOERS_DIR/$SUDOERS_FILE" ];
    then
        create_sudoers_file
        if [ -z "$(sudo diff $SCRIPT_TEMP_PATH/$SUDOERS_FILE $SUDOERS_DIR/$SUDOERS_FILE)" ]
        then
            report_status "No need to update $SUDOERS_DIR/$SUDOERS_FILE"
        else
            report_status "$SUDOERS_DIR/$SUDOERS_FILE needs to be updated."
            install_sudoers_file
        fi
    fi
}


add_new_group()
{
    sudo addgroup --system $NEW_GROUP &> /dev/null
    report_status "\e[1;32m...done\e[0m"
}

add_user_to_group()
{
    sudo usermod -aG $NEW_GROUP $USER &> /dev/null
    report_status "\e[1;32m...done\e[0m"
}

adduser_hint()
{
    report_status "\e[1;31mYou have to REBOOT for the changes to take effect!\e[0m"
}

# Helper functions
report_status()
{
    echo -e "\n\n###### $1"
}

clean_temp()
{
    sudo rm -f $SCRIPT_TEMP_PATH/$SUDOERS_FILE
}
### Main

verify_ready

if [ -e "$SUDOERS_DIR/$SUDOERS_FILE" ] && [ $(sudo cat /etc/gshadow | grep -c "${NEW_GROUP}") -eq 1 ] && [ $(groups | grep -c "$NEW_GROUP") -eq 1 ];
then
    check_update_sudoers_file
    report_status "\e[1;32mEverything is set up, nothing to do...\e[0m\n"
    exit 0

else

    if [ -e "$SUDOERS_DIR/$SUDOERS_FILE" ];
    then
        report_status "\e[1;32mFile exists:\e[0m ${SUDOERS_FILE}"
        check_update_sudoers_file
    else
        report_status "\e[1;31mFile not found:\e[0m ${SUDOERS_FILE}\n"
        create_sudoers_file
        install_sudoers_file
    fi

    if [ $(sudo cat /etc/gshadow | grep -c "${NEW_GROUP}") -eq 1 ];
    then
        report_status "Group ${NEW_GROUP} already exists..."
    else
        report_status "Group ${NEW_GROUP} will be added..."
        add_new_group
    fi

    if [ $(groups | grep -c "$NEW_GROUP") -eq 1 ];
    then
        report_status "User ${USER} is already in $NEW_GROUP..."
    else
        report_status "Adding User ${USER} to Group $NEW_GROUP..."
        add_user_to_group
        adduser_hint
    fi
fi

update_env
clean_temp
exit 0
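The script takes no arguments; a run-and-verify sketch (the group check mirrors the test the script itself performs):

    ./scripts/sudo_fix.sh

    # After a reboot (or re-login) the new group should be listed
    groups | grep mnrkrsudo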
scripts/tag-release.sh (new file, 71 lines)
@@ -0,0 +1,71 @@
#! /bin/bash
# Helper Script for Tagging Moonraker Releases

PRINT_ONLY="n"
KLIPPER_PATH="$HOME/klipper"
REMOTE=""
DESCRIBE="describe --always --tags --long"

# Get Tag and Klipper Path
TAG=$1
shift
while :; do
    case $1 in
        -k|--klipper-path)
            shift
            KLIPPER_PATH=$1
            ;;
        -p|--print)
            PRINT_ONLY="y"
            ;;
        *)
            break
    esac

    shift
done


if [ ! -d "$KLIPPER_PATH/.git" ]; then
    echo "Invalid Klipper Path: $KLIPPER_PATH"
    exit 1
fi
echo "Klipper found at $KLIPPER_PATH"
GIT_CMD="git -C $KLIPPER_PATH"

ALL_REMOTES="$( $GIT_CMD remote | tr '\n' ' ' | awk '{gsub(/^ +| +$/,"")} {print $0}' )"
echo "Found Klipper Remotes: $ALL_REMOTES"
for val in $ALL_REMOTES; do
    REMOTE_URL="$( $GIT_CMD remote get-url $val | awk '{gsub(/^ +| +$/,"")} {print tolower($0)}' )"
    match="$( echo $REMOTE_URL | grep -Ecm1 '(klipper3d|kevinoconnor)/klipper' || true )"
    if [ "$match" -eq 1 ]; then
        echo "Found Remote $val"
        REMOTE="$val"
        break
    fi
done

[ "$REMOTE" = "" ] && echo "Unable to find a valid remote" && exit 1

$GIT_CMD fetch $REMOTE

DESC="$( $GIT_CMD $DESCRIBE $REMOTE/master | awk '{gsub(/^ +| +$/,"")} {print $0}' )"
HASH="$( $GIT_CMD rev-parse $REMOTE/master | awk '{gsub(/^ +| +$/,"")} {print $0}' )"

if [ "$PRINT_ONLY" = "y" ]; then
    echo "
Tag: $TAG
Repo: Klipper
Branch: Master
Version: $DESC
Commit: $HASH
"
else
    echo "Adding Tag $TAG"
    git tag -a $TAG -m "Moonraker Version $TAG

Klipper Tag Data
repo: klipper
branch: master
version: $DESC
commit: $HASH
"
fi
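A hedged example; the tag name is hypothetical, and -p only prints the Klipper data that would be embedded without creating the tag:

    # Preview the Klipper version/commit that would be recorded for the tag
    ./scripts/tag-release.sh v0.7.1 -p

    # Create the annotated tag with the embedded Klipper tag data
    ./scripts/tag-release.sh v0.7.1 -k ~/klipper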
scripts/uninstall-moonraker.sh (new file, 71 lines)
@@ -0,0 +1,71 @@
#!/bin/bash
# Moonraker uninstall script for Raspbian/Raspberry Pi OS

stop_service() {
    # Stop Moonraker Service
    echo "#### Stopping Moonraker Service.."
    sudo systemctl stop moonraker
}

remove_service() {
    # Remove Moonraker LSB/systemd service
    echo
    echo "#### Removing Moonraker Service.."
    if [ -f "/etc/init.d/moonraker" ]; then
        # legacy installation, remove the LSB service
        sudo update-rc.d -f moonraker remove
        sudo rm -f /etc/init.d/moonraker
        sudo rm -f /etc/default/moonraker
    else
        # Remove systemd installation
        sudo systemctl disable moonraker
        sudo rm -f /etc/systemd/system/moonraker.service
        sudo systemctl daemon-reload
        sudo systemctl reset-failed
    fi
}

remove_files() {
    # Remove API Key file from older versions
    if [ -e ~/.klippy_api_key ]; then
        echo "Removing legacy API Key"
        rm ~/.klippy_api_key
    fi

    # Remove API Key file from recent versions
    if [ -e ~/.moonraker_api_key ]; then
        echo "Removing API Key"
        rm ~/.moonraker_api_key
    fi

    # Remove virtualenv
    if [ -d ~/moonraker-env ]; then
        echo "Removing virtualenv..."
        rm -rf ~/moonraker-env
    else
        echo "No moonraker virtualenv found"
    fi

    # Notify user of method to remove Moonraker source code
    echo
    echo "The Moonraker system files and virtualenv have been removed."
    echo
    echo "The following command is typically used to remove source files:"
    echo "  rm -rf ~/moonraker"
    echo
    echo "You may also wish to uninstall nginx:"
    echo "  sudo apt-get remove nginx"
}

verify_ready()
{
    if [ "$EUID" -eq 0 ]; then
        echo "This script must not run as root"
        exit -1
    fi
}

verify_ready
stop_service
remove_service
remove_files