Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .github/workflows/automatic-doc-checks.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ on:
push:
branches: [ main ]
pull_request:
paths:
- 'docs/**' # Only run on changes to the docs directory

workflow_dispatch:
# Manual trigger
Expand Down
14 changes: 8 additions & 6 deletions .github/workflows/check-removed-urls.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,20 +9,22 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout PR branch
uses: actions/checkout@v4
uses: actions/checkout@v5
with:
repository: ${{ github.event.pull_request.head.repo.full_name }}
ref: ${{ github.event.pull_request.head.ref }}
# This implicitly gets the PR branch. Making it explicit causes problems
# with private forks, but it is equivalent to the following:
# repository: ${{ github.event.pull_request.head.repo.full_name }}
# ref: ${{ github.event.pull_request.head.ref }}
fetch-depth: 0
path: compare
- name: Checkout base branch
uses: actions/checkout@v4
uses: actions/checkout@v5
with:
ref: ${{ github.event.pull_request.base.ref }}
repository: ${{ github.event.pull_request.base.repo.full_name }}
fetch-depth: 0
path: base
- uses: actions/setup-python@v5
- uses: actions/setup-python@v6
- name: Build docs
run: |
for dir in compare base; do
Expand Down Expand Up @@ -51,4 +53,4 @@ jobs:
echo "$removed"
echo "Please ensure removed pages are redirected"
exit 1
fi
fi
15 changes: 11 additions & 4 deletions .github/workflows/markdown-style-checks.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,17 +4,24 @@ on:
push:
branches:
- main
paths:
- 'docs/**' # Only run on changes to the docs directory
pull_request:
branches:
- '*'
paths:
- 'docs/**' # Only run on changes to the docs directory

jobs:
markdown-lint:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
fetch-depth: 0
- uses: DavidAnson/markdownlint-cli2-action@v16
with:
config: "docs/.sphinx/.markdownlint.json"
- name: Create venv
working-directory: "docs"
run: make install
- name: Lint markdown
working-directory: "docs"
run: make lint-md
9 changes: 9 additions & 0 deletions docs/.custom_wordlist.txt
Original file line number Diff line number Diff line change
Expand Up @@ -66,5 +66,14 @@ microk
memcached
sideload
snapstorage
nonces?
pymemcache
unscanned
Canonical's
ctrl
postgres
jq
sideloaded
iptable
PGPy
airgapped
27 changes: 0 additions & 27 deletions docs/.sphinx/.markdownlint.json

This file was deleted.

43 changes: 43 additions & 0 deletions docs/.sphinx/.pymarkdown.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
{
"plugins": {
"selectively_enable_rules": true,
"heading-style": {
"enabled": true,
"style": "atx"
},
"commands-show-output": {
"enabled": true
},
"no-missing-space-atx": {
"enabled": true
},
"heading-start-left": {
"enabled": true
},
"no-trailing-punctuation": {
"enabled": true,
"punctuation": ".,;。,;"
},
"blanks-around-fences": {
"enabled": true,
"list_items": false
},
"blanks-around-lists": {
"enabled": true
},
"hr-style": {
"enabled": true
},
"no-empty-links": {
"enabled": true
},
"no-alt-text": {
"enabled": true
}
},
"extensions": {
"front-matter" : {
"enabled" : true
}
}
}
16 changes: 8 additions & 8 deletions docs/.sphinx/get_vale_conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,12 +31,12 @@
def clone_repo_and_copy_paths(file_source_dest, overwrite=False):
"""
Clone the repository to a temporary directory and copy required files

Args:
file_source_dest: dictionary of file paths to copy from the repository,
and their destination paths
overwrite: boolean flag to overwrite existing files in the destination

Returns:
bool: True if all files were copied successfully, False otherwise
"""
Expand All @@ -52,8 +52,8 @@ def clone_repo_and_copy_paths(file_source_dest, overwrite=False):

try:
result = subprocess.run(
clone_cmd,
capture_output=True,
clone_cmd,
capture_output=True,
text=True,
check=True
)
Expand All @@ -73,7 +73,7 @@ def clone_repo_and_copy_paths(file_source_dest, overwrite=False):
continue

if not copy_files_to_path(source_path, dest, overwrite):
is_copy_success = False
is_copy_success = False
logging.error("Failed to copy %s to %s", source_path, dest)

# Clean up temporary directory
Expand All @@ -85,12 +85,12 @@ def clone_repo_and_copy_paths(file_source_dest, overwrite=False):
def copy_files_to_path(source_path, dest_path, overwrite=False):
"""
Copy a file or directory from source to destination

Args:
source_path: Path to the source file or directory
dest_path: Path to the destination
overwrite: Boolean flag to overwrite existing files in the destination

Returns:
bool: True if copy was successful, False otherwise
"""
Expand Down Expand Up @@ -138,7 +138,7 @@ def main():
# Parse command line arguments, default to overwrite_enabled = True
overwrite_enabled = not parse_arguments().no_overwrite

# Download into /tmp through git clone
# Download into /tmp through git clone
if not clone_repo_and_copy_paths(vale_files_dict, overwrite=overwrite_enabled):
logging.error("Failed to download files from repository")
return 1
Expand Down
94 changes: 94 additions & 0 deletions docs/.sphinx/metrics/build_metrics.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
#!/usr/bin/python3

import sys
import argparse
from pathlib import Path
from html.parser import HTMLParser
from urllib.parse import urlsplit


class MetricsParser(HTMLParser):
    """
    HTML parser that accumulates link and image counts.

    Counts are cumulative across calls to :meth:`read`, so several files
    can be parsed with one instance to obtain site-wide totals.
    """

    def __init__(self):
        super().__init__()
        # Links whose href has a path but no hostname (site-relative)
        self.int_link_count = 0
        # Links whose href includes a hostname (netloc)
        self.ext_link_count = 0
        # Links whose href is empty or a bare "#fragment"
        self.fragment_count = 0
        self.image_count = 0
        # Nesting depth of currently-open <object> tags; used so that only
        # the outer-most <object> (and no <img> fallback inside it) counts
        self.in_object = 0

    @property
    def link_count(self):
        """Total number of <a href=...> links counted so far."""
        return self.fragment_count + self.int_link_count + self.ext_link_count

    def read(self, file):
        """
        Read *file* (a file-like object with a ``read`` method returning
        strings) a chunk at a time, feeding each chunk to the parser.
        """
        # Ensure the parser state is reset before each file (just in case
        # there's an erroneous dangling <object>).  Note that reset() only
        # clears HTMLParser's internal state; the counters above deliberately
        # keep accumulating across files.
        self.reset()
        self.in_object = 0
        while True:
            # Parse 1MB chunks at a time
            buf = file.read(1024**2)
            if not buf:
                break
            self.feed(buf)
        # Flush anything still buffered inside HTMLParser (e.g. a tag
        # truncated at end-of-file would otherwise be silently dropped)
        self.close()

    def handle_starttag(self, tag, attrs):
        """
        Count <a>, <img>, and <object> tags to determine the number of internal
        and external links, and the number of images.
        """
        attrs = dict(attrs)
        # A valueless attribute (e.g. "<a href>") is reported by HTMLParser
        # as None; guard against it since urlsplit(None) raises TypeError.
        if tag == 'a' and attrs.get('href') is not None:
            # If there's no hostname (netloc) the link is internal; if there's
            # additionally no path, it's just a fragment link within the page
            url = urlsplit(attrs['href'])
            if url.netloc:
                self.ext_link_count += 1
            elif url.path:
                self.int_link_count += 1
            else:
                self.fragment_count += 1
        elif tag == 'object':
            # <object> tags are a bit complex as they nest to offer fallbacks
            # and may contain an <img> fallback. We only want to count the
            # outer-most <object> in this case
            if self.in_object == 0:
                self.image_count += 1
            self.in_object += 1
        elif tag == 'img' and self.in_object == 0:
            self.image_count += 1

    def handle_endtag(self, tag):
        """Track closing </object> tags to maintain the nesting depth."""
        if tag == 'object':
            # Never let in_object be negative
            self.in_object = max(0, self.in_object - 1)


def main(args=None):
    """
    Entry point: parse the command line, scan the build directory for
    ``*.html`` files, and print a summary of link and image counts.

    Args:
        args: optional argument list (defaults to ``sys.argv[1:]``)
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        'build_dir', metavar='build-dir', nargs='?', default='.',
        help="The directory to scan for HTML files")
    config = arg_parser.parse_args(args)

    # One parser instance accumulates totals across every file found
    metrics = MetricsParser()
    for html_path in Path(config.build_dir).rglob('*.html'):
        with html_path.open('r', encoding='utf-8', errors='replace') as stream:
            metrics.read(stream)

    print('Summarising metrics for build files (.html)...')
    link_summary = (
        f'\tlinks: {metrics.link_count} ('
        f'{metrics.fragment_count} #frag…, '
        f'{metrics.int_link_count} /int…, '
        f'{metrics.ext_link_count} https://ext…'
        ')'
    )
    print(link_summary)
    print(f'\timages: {metrics.image_count}')


# Allow the module to run as a script; main()'s return value becomes the
# process exit status (an implicit None maps to exit code 0).
if __name__ == '__main__':
    sys.exit(main())
15 changes: 0 additions & 15 deletions docs/.sphinx/metrics/build_metrics.sh

This file was deleted.

10 changes: 6 additions & 4 deletions docs/.sphinx/update_sp.py
100644 → 100755
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,9 @@
from requests.exceptions import RequestException
from packaging.version import parse as parse_version

SPHINX_DIR = os.path.join(os.getcwd(), ".sphinx")
SPHINX_DIR = os.path.abspath(os.path.dirname(__file__))
DOCS_DIR = os.path.abspath(os.path.join(SPHINX_DIR, '..'))
REQUIREMENTS = os.path.join(DOCS_DIR, "requirements.txt")
SPHINX_UPDATE_DIR = os.path.join(SPHINX_DIR, "update")
GITHUB_REPO = "canonical/sphinx-docs-starter-pack"
GITHUB_API_BASE = f"https://api.github.com/repos/{GITHUB_REPO}"
Expand Down Expand Up @@ -103,7 +105,7 @@ def main():
# Check requirements are the same
new_requirements = []
try:
with open("requirements.txt", "r") as file:
with open(REQUIREMENTS, "r") as file:
logging.debug("Checking requirements")

local_reqs = set(file.read().splitlines()) - {""}
Expand All @@ -121,7 +123,7 @@ def main():

if new_requirements != set():
print(
"You may need to add the following pacakges to your requirements.txt file:"
"You may need to add the following packages to your requirements.txt file:"
)
for r in new_requirements:
print(f"{r}\n")
Expand Down Expand Up @@ -206,7 +208,7 @@ def update_static_files():
# Writes return value for parent function
if new_file_list != []:
# Provides more information on new files
with open("NEWFILES.txt", "w") as f:
with open(f"{SPHINX_DIR}/NEWFILES.txt", "w") as f:
for entry in new_file_list:
f.write(f"{entry}\n")
logging.debug("Some downloaded files are new")
Expand Down
2 changes: 1 addition & 1 deletion docs/.sphinx/version
Original file line number Diff line number Diff line change
@@ -1 +1 @@
1.2.0
1.3.0
Loading
Loading