-
Notifications
You must be signed in to change notification settings - Fork 2
/
generate_alps.py
87 lines (69 loc) · 2.52 KB
/
generate_alps.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
import sys
import urllib2
from bs4 import BeautifulSoup
# Source of truth: IANA's XML registry of link relations.
URL = "http://www.iana.org/assignments/link-relations/link-relations.xml"
# IANA's persistent-redirect service; a draft/RFC identifier is appended
# to produce a stable help URL (see xref_to_url).
IANA_GO_BASE = "http://www.iana.org/go/"
# Opening of the generated ALPS document. %(updated)s is interpolated with
# the registry's <updated> date at write time.
PREFIX = """<?xml version="1.0" ?>
<alps>
<link rel="self" href="http://alps.io/iana/relations"/>
<link rel="help" href="http://www.iana.org/assignments/link-relations/link-relations.xml"/>
<doc format="html">
<p>This document contains ALPS versions of the link relations
registered with the <a
href="http://www.iana.org/assignments/link-relations/link-relations.xml">IANA
registry of link relations</a>. This document reflects the
%(updated)s revision of the registry.</p>
<p>The descriptions and links found in this ALPS document are
informative. Every link relation in this document is equivalent to a
relation registered with the IANA registry, and the IANA description
is the normative one.</p>
</doc>
"""
# Closing tag of the generated ALPS document.
SUFFIX = "</alps>"
def xref_to_url(xref):
    """Return the URL that a registry <xref> element points to.

    :param xref: a mapping (e.g. a BeautifulSoup Tag) with 'type' and
        'data' entries. 'type' is "uri", "draft", or "rfc".
    :return: the reference's URL as a string. A "uri" reference is its
        own URL; drafts and RFCs are routed through IANA's persistent
        redirect service.
    """
    ref_type = xref['type']  # "uri", "draft", or "rfc".
    ref_data = xref['data']
    if ref_type == 'uri':
        # The reference data is already a full URL.
        url = ref_data
    else:
        # Internet-Draft or RFC: build a stable http://www.iana.org/go/ link.
        url = IANA_GO_BASE + ref_data
    return url
def record_as_descriptor(record):
    """Render one registry <record> as an ALPS <descriptor> XML fragment.

    :param record: a BeautifulSoup Tag for a registry <record>, expected
        to contain <value>, <description>, and <spec> children.
    :return: the <descriptor> markup as a string, ending with a newline.

    NOTE(review): mutates the <description> subtree of *record* in place
    (its <xref> tags are rewritten into <a> tags).
    """
    relation = record.value.string
    description = record.description
    # Replace xrefs in description with html links.
    for xref in description.find_all('xref'):
        # Rewrite the tag in place: <xref type=... data=...> becomes
        # <a href=...>data</a>. 'data' must be read (for href and link
        # text) before the attribute deletions below.
        xref.name = 'a'
        xref['href'] = xref_to_url(xref)
        xref.string = xref['data']
        del xref['type']
        del xref['data']
    # TODO: do the same for note.
    spec = record.spec
    # The spec's first xref provides the normative "help" link.
    xref = spec.find('xref')
    help_link = xref_to_url(xref)
    # An absent <note> becomes the empty string; a present one is wrapped
    # in a paragraph and appended after the description.
    note = spec.find('note') or ''
    if note != '':
        note = "<p>%s</p>" % note.string
    values = dict(relation=relation, description=description, note=note,
                  help_link=help_link)
    data = []
    data.append(' <descriptor id="%(relation)s" type="safe">' % values)
    data.append(' <link rel="help" href="%(help_link)s"/>' % values)
    data.append(' <doc format="html">')
    # decode_contents() serializes the (now rewritten) description markup.
    data.append(' <p>%s</p>%s' % (description.decode_contents(), note))
    data.append(' </doc>')
    data.append(' </descriptor>')
    data.append('')
    return "\n".join(data)
# --- Script body: fetch the IANA registry, transform each record, and ---
# --- stream the resulting ALPS document to stdout.                    ---
# Renamed from `input`, which shadowed the builtin of the same name.
registry_xml = urllib2.urlopen(URL).read()
soup = BeautifulSoup(registry_xml, "xml")
# The registry's last-modified date, interpolated into the preamble.
updated = soup.registry.updated.string
out = sys.stdout
out.write(PREFIX % dict(updated=updated))
for record in soup.find_all("record"):
    # Descriptors may contain non-ASCII text; stdout expects encoded
    # bytes under Python 2, hence the explicit utf-8 encode.
    out.write(record_as_descriptor(record).encode("utf8"))
    out.write("\n")
out.write(SUFFIX)