Skip to content

Commit

Permalink
Fix documentation anchor links (#1585)
Browse files Browse the repository at this point in the history
  • Loading branch information
daveoconnor authored Jan 9, 2025
1 parent c9f4d36 commit b8c04c4
Show file tree
Hide file tree
Showing 3 changed files with 30 additions and 7 deletions.
11 changes: 11 additions & 0 deletions core/htmlhelper.py
Original file line number Diff line number Diff line change
Expand Up @@ -176,6 +176,8 @@ def modernize_legacy_page(
for tag in result.find_all(tag_name, tag_attrs):
tag.attrs.pop("class")

result = convert_name_to_id(result)

# Use the base HTML to later extract the <head> and (part of) the <body>
placeholder = BeautifulSoup(base_html, "html.parser")
if isinstance(head_selector, str):
Expand Down Expand Up @@ -280,6 +282,15 @@ def convert_h1_to_h2(soup):
return soup


def convert_name_to_id(soup):
    """Convert deprecated ``name`` anchor attributes to ``id`` attributes.

    HTML5 dropped the ``name`` attribute as a fragment target; browsers
    resolve ``#fragment`` links against ``id``.  Every tag carrying a
    ``name`` attribute has it moved to ``id`` so legacy in-page anchors
    keep working.

    :param soup: a BeautifulSoup document; mutated in place.
    :return: the same soup object, for chaining.
    """
    for tag in soup.find_all(attrs={"name": True}):
        # Don't clobber an id the document already declares (and that
        # existing links may target); keep the id, drop the name.
        tag.attrs.setdefault("id", tag["name"])
        del tag["name"]

    return soup


def format_nested_lists(soup):
"""Flattens nested lists"""
try:
Expand Down
3 changes: 2 additions & 1 deletion core/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@
get_s3_client,
)
from .constants import SourceDocType
from .htmlhelper import modernize_legacy_page
from .htmlhelper import modernize_legacy_page, convert_name_to_id
from .markdown import process_md
from .models import RenderedContent
from .tasks import (
Expand Down Expand Up @@ -472,6 +472,7 @@ def process_content(self, content):
if source_content_type == SourceDocType.ASCIIDOC:
extracted_content = content.decode(chardet.detect(content)["encoding"])
soup = BeautifulSoup(extracted_content, "html.parser")
soup = convert_name_to_id(soup)
soup.find("head").append(
soup.new_tag("script", src=f"{STATIC_URL}js/theme_handling.js")
)
Expand Down
23 changes: 17 additions & 6 deletions templates/docsiframe.html
Original file line number Diff line number Diff line change
Expand Up @@ -12,23 +12,34 @@
{#resizeIframe(iframe);#}
addClickInterception(iframeDoc);
addBase(iframeDoc);
if (window.location.hash) {
scrollToAnchor(iframeDoc, window.location.hash.slice(1));
}
}

function resizeIframe(obj) {
    // Grow the iframe element to the full height of its inner document
    // so the embedded page never shows its own scrollbar.
    const innerHeight = obj.contentWindow.document.documentElement.scrollHeight;
    obj.style.height = `${innerHeight}px`;
}

function addClickInterception(iframeDoc) {
let anchorLinks = iframeDoc.querySelectorAll('a[href^="#"]');
function scrollToAnchor(iframeDoc, hash) {
    // Smooth-scroll the iframe document to the element whose id matches
    // the fragment; a missing target is silently ignored.
    const target = iframeDoc.getElementById(hash);
    if (!target) {
        return;
    }
    target.scrollIntoView({behavior: 'smooth'});
}

function addClickInterception(iframeDoc) {
    // Intercept clicks on every link whose href contains a fragment so
    // cross-page navigation and in-page scrolling are driven from the
    // parent window instead of the iframe's default link handling.
    // NOTE(review): stale removed-diff lines (old targetId/targetElement
    // logic) were interleaved here, unbalancing the braces; this is the
    // reconstructed post-change implementation.
    let anchorLinks = iframeDoc.querySelectorAll('a[href*="#"]');
    anchorLinks.forEach(function (anchor) {
        anchor.addEventListener('click', function (event) {
            const href = this.getAttribute('href');
            const hrefSplit = href.split('#');
            event.preventDefault();
            // here we account for anchors on different pages
            if (!window.location.href.endsWith(hrefSplit[0])) {
                window.location.href = href;
            }
            scrollToAnchor(iframeDoc, hrefSplit[1]);
        });
    });
}
Expand Down

0 comments on commit b8c04c4

Please sign in to comment.