@@ -24,6 +24,8 @@
 from __future__ import print_function
 import re
 import os.path
+import tempfile
+import urllib.request
 from codecs import open
 from markdown.extensions import Extension
 from markdown.preprocessors import Preprocessor
@@ -95,114 +97,139 @@ def __init__(self, md, config):
     def run(self, lines):
         done = False
         bonusHeading = ""
-        while not done:
-            for loc, line in enumerate(lines):
-                m = INC_SYNTAX.search(line)
-
-                while m:
-                    relative_filename = m.group(1)
-                    filename = os.path.expanduser(relative_filename)
-                    if not os.path.isabs(filename):
-                        filename = os.path.normpath(
-                            os.path.join(self.base_path, filename)
-                        )
-                    try:
-                        with open(filename, "r", encoding=self.encoding) as r:
-                            original_text = self.run(r.readlines())
-
-                    except Exception as e:
-                        if not self.throwException:
-                            print(
-                                "Warning: could not find file {}. Ignoring "
-                                "include statement. Error: {}".format(filename, e)
+        files = []
+        try:
+            while not done:
+                for loc, line in enumerate(lines):
+                    m = INC_SYNTAX.search(line)
+
+                    while m:
+                        filename = m.group(1)
+                        rel_filename = filename
+                        if filename.startswith("https://"):
+                            filename = self.load_remote(filename)
+                            files.append(filename)
+                        filename = os.path.expanduser(filename)
+                        if not os.path.isabs(filename):
+                            filename = os.path.normpath(
+                                os.path.join(self.base_path, filename)
                             )
-                            lines[loc] = INC_SYNTAX.sub("", line)
-                            break
+                        try:
+                            with open(filename, "r", encoding=self.encoding) as r:
+                                original_text = self.run(r.readlines())
+
+                        except Exception as e:
+                            if not self.throwException:
+                                print(
+                                    "Warning: could not find file {}. Ignoring "
+                                    "include statement. Error: {}".format(filename, e)
+                                )
+                                lines[loc] = INC_SYNTAX.sub("", line)
+                                break
+                            else:
+                                raise e
+                        if m.group(2) is None:
+                            text = original_text
                         else:
-                            raise e
-                    if m.group(2) is None:
-                        text = original_text
-                    else:
-                        lines_str = m.group(4)
-                        lines_blocks = lines_str.split()
-                        wanted_lines = []
-                        for block in lines_blocks:
-                            if "-" in block:
-                                start, end = block.strip().split("-")
-                                current_start = int(start)
-                                current_end = int(end)
-                                if not len(original_text) >= current_end:
-                                    current_end = len(original_text)
-                                    print(
-                                        f"Warning: line range: {block} ending in "
-                                        f"line: {end} is larger than file: {filename} "
-                                        f"using end: {current_end}"
+                            lines_str = m.group(4)
+                            lines_blocks = lines_str.split()
+                            wanted_lines = []
+                            for block in lines_blocks:
+                                if "-" in block:
+                                    start, end = block.strip().split("-")
+                                    current_start = int(start)
+                                    current_end = int(end)
+                                    if not len(original_text) >= current_end:
+                                        current_end = len(original_text)
+                                        print(
+                                            f"Warning: line range: {block} ending in "
+                                            f"line: {end} is larger than file: {filename} "
+                                            f"using end: {current_end}"
+                                        )
+                                    if not current_start <= current_end:
+                                        current_start = max(current_end - 1, 1)
+                                        print(
+                                            f"Warning: in line range: {block} "
+                                            f"the start line: {start} is not "
+                                            f"smaller than the end line: {current_end} "
+                                            f"using start: {current_start}"
+                                        )
+
+                                    wanted_lines.extend(
+                                        original_text[current_start - 1 : current_end]
                                     )
-                                if not current_start <= current_end:
-                                    current_start = max(current_end - 1, 1)
-                                    print(
-                                        f"Warning: in line range: {block} "
-                                        f"the start line: {start} is not "
-                                        f"smaller than the end line: {current_end} "
-                                        f"using start: {current_start}"
+                                else:
+                                    wanted_line = int(block.strip())
+                                    current_line = wanted_line
+                                    if current_line > len(original_text):
+                                        current_line = len(original_text)
+                                        print(
+                                            f"Warning: line: {wanted_line} is larger than "
+                                            f"file: {filename} using end: {current_line}"
+                                        )
+                                    wanted_lines.append(original_text[current_line - 1])
+                            text = wanted_lines
+
+                        if len(text) == 0:
+                            text.append("")
+                        for i in range(len(text)):
+                            # Strip the newline, and optionally increase header depth
+                            if HEADING_SYNTAX.search(text[i]):
+                                if self.inheritHeadingDepth:
+                                    text[i] = bonusHeading + text[i]
+                                if self.headingOffset:
+                                    text[i] = "#" * self.headingOffset + text[i]
+                            link = LINK_SYNTAX.search(text[i])
+                            if link:
+                                raw_path = link.group(2)
+                                if (
+                                    not raw_path.startswith("http")
+                                    and not raw_path.startswith("/")
+                                    and not raw_path.startswith("#")
+                                ):
+                                    path_ = f"{os.path.dirname(rel_filename)}{os.path.sep}{raw_path}"
+                                    text[i] = (
+                                        text[i][: link.start(2)]
+                                        + path_
+                                        + text[i][link.end(2) :]
                                     )
 
-                                wanted_lines.extend(
-                                    original_text[current_start - 1 : current_end]
-                                )
-                            else:
-                                wanted_line = int(block.strip())
-                                current_line = wanted_line
-                                if current_line > len(original_text):
-                                    current_line = len(original_text)
-                                    print(
-                                        f"Warning: line: {wanted_line} is larger than "
-                                        f"file: {filename} using end: {current_line}"
-                                    )
-                                wanted_lines.append(original_text[current_line - 1])
-                        text = wanted_lines
-
-                    if len(text) == 0:
-                        text.append("")
-                    for i in range(len(text)):
-                        # Strip the newline, and optionally increase header depth
-                        if HEADING_SYNTAX.search(text[i]):
-                            if self.inheritHeadingDepth:
-                                text[i] = bonusHeading + text[i]
-                            if self.headingOffset:
-                                text[i] = "#" * self.headingOffset + text[i]
-                        link = LINK_SYNTAX.search(text[i])
-                        if link:
-                            raw_path = link.group(2)
-                            if (
-                                not raw_path.startswith("http")
-                                and not raw_path.startswith("/")
-                                and not raw_path.startswith("#")
-                            ):
-                                path_ = f"{os.path.dirname(relative_filename)}{os.path.sep}{raw_path}"
-                                text[i] = (
-                                    text[i][: link.start(2)]
-                                    + path_
-                                    + text[i][link.end(2) :]
-                                )
+                            text[i] = text[i].rstrip("\r\n")
+                        text_to_insert = "\r\n".join(text)
+                        line = line[: m.start()] + text_to_insert.strip() + line[m.end() :]
+                        del lines[loc]
+                        lines[loc:loc] = line.splitlines()
+                        m = INC_SYNTAX.search(line)
 
-                        text[i] = text[i].rstrip("\r\n")
-                    text_to_insert = "\r\n".join(text)
-                    line = line[: m.start()] + text_to_insert.strip() + line[m.end() :]
-                    del lines[loc]
-                    lines[loc:loc] = line.splitlines()
-                    m = INC_SYNTAX.search(line)
+                    else:
+                        h = HEADING_SYNTAX.search(line)
+                        if h:
+                            headingDepth = len(h.group(0))
+                            bonusHeading = "#" * headingDepth
 
                 else:
-                    h = HEADING_SYNTAX.search(line)
-                    if h:
-                        headingDepth = len(h.group(0))
-                        bonusHeading = "#" * headingDepth
-
-                else:
-                    done = True
+                    done = True
+        finally:
+            # clean up any temp files we created
+            for file in files:
+                os.remove(file)
         return lines
 
+    def load_remote(self, filename):
+        name = None
+        # do not delete on close as we open it later on
+        with tempfile.NamedTemporaryFile(mode="wb", delete=False) as file:
+            name = file.name
+            try:
+                urlopen = urllib.request.urlopen(filename)
+                data = urlopen.read()
+                file.write(data)
+            except urllib.error.HTTPError as e:
+                file.write(
+                    bytes(f"Error loading remote template ({filename}): {e}", "utf-8")
+                )
+        return name
+
 
 def makeExtension(*args, **kwargs):
     return MarkdownInclude(kwargs)
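For context, a minimal sketch (not part of the diff) of how the new remote include could be exercised once this change lands. The {!...!} include syntax, the markdown_include.include extension name, and the base_path option come from the existing markdown-include package; the remote URL and the local file path below are placeholders.

# Sketch only: exercises the remote-include path added by this change.
# https://example.com/snippet.md and docs/local-snippet.md are placeholders.
import markdown

source = "\n".join([
    "# Local heading",
    "",
    "{!https://example.com/snippet.md!}",  # fetched by load_remote() into a temp file
    "{!docs/local-snippet.md!}",           # still resolved against base_path as before
])

html = markdown.markdown(
    source,
    extensions=["markdown_include.include"],
    extension_configs={"markdown_include.include": {"base_path": "."}},
)
print(html)

Note that any temp file written by load_remote() is removed in the finally block once run() returns, so remote content only exists on disk for the duration of a single conversion.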