#!/usr/bin/python

import logging
import os
import re

from pakfire.constants import *

class LexerError(Exception):
	pass


class LexerUnhandledLine(LexerError):
	pass


class EndOfFileError(LexerError):
	pass


class LexerUndefinedVariableError(LexerError):
	pass


LEXER_VALID_PACKAGE_NAME = re.compile(r"[A-Za-z][A-Za-z0-9\_\-\+]")

# XXX need to build check
LEXER_VALID_SCRIPTLET_NAME = re.compile(r"((pre|post|posttrans)(in|un|up))")

LEXER_COMMENT_CHAR = "#"
LEXER_COMMENT = re.compile(r"^\s*#")
LEXER_QUOTES = "\"'"
LEXER_EMPTY_LINE = re.compile(r"^\s*$")

LEXER_DEFINITION = re.compile(r"^([A-Za-z0-9_\-]+)\s*(\+)?=\s*(.+)?$")

LEXER_BLOCK_LINE_INDENT = "\t"
LEXER_BLOCK_LINE = re.compile(r"^\t(.*)$")
LEXER_BLOCK_END = re.compile(r"^end$")

LEXER_DEFINE_BEGIN = re.compile(r"^(def)?\s?([A-Za-z0-9_\-]+)$")
LEXER_DEFINE_LINE = LEXER_BLOCK_LINE
LEXER_DEFINE_END = LEXER_BLOCK_END

LEXER_PACKAGE_BEGIN = re.compile(r"^package ([A-Za-z0-9_\-\+\%\{\}]+)$")
LEXER_PACKAGE_LINE = LEXER_BLOCK_LINE
LEXER_PACKAGE_END = LEXER_BLOCK_END
LEXER_PACKAGE_INHERIT = re.compile(r"^template ([A-Z]+)$")

LEXER_SCRIPTLET_BEGIN = re.compile(r"^script ([a-z]+)\s?(/[A-Za-z0-9\-\_/]+)?$")
LEXER_SCRIPTLET_LINE = LEXER_BLOCK_LINE
LEXER_SCRIPTLET_END = LEXER_BLOCK_END

LEXER_TEMPLATE_BEGIN = re.compile(r"^template ([A-Z]+)$")
LEXER_TEMPLATE_LINE = LEXER_BLOCK_LINE
LEXER_TEMPLATE_END = LEXER_BLOCK_END

LEXER_BUILD_BEGIN = re.compile(r"^build$")
LEXER_BUILD_LINE = LEXER_BLOCK_LINE
LEXER_BUILD_END = LEXER_BLOCK_END

LEXER_DEPS_BEGIN = re.compile(r"^dependencies$")
LEXER_DEPS_LINE = LEXER_BLOCK_LINE
LEXER_DEPS_END = LEXER_BLOCK_END

LEXER_DISTRO_BEGIN = re.compile(r"^distribution$")
LEXER_DISTRO_LINE = LEXER_BLOCK_LINE
LEXER_DISTRO_END = LEXER_BLOCK_END

LEXER_PACKAGES_BEGIN = re.compile(r"^packages$")
LEXER_PACKAGES_LINE = LEXER_BLOCK_LINE
LEXER_PACKAGES_END = LEXER_BLOCK_END

LEXER_PACKAGE2_BEGIN = re.compile(r"^package$")
LEXER_PACKAGE2_LINE = LEXER_BLOCK_LINE
LEXER_PACKAGE2_END = LEXER_BLOCK_END

LEXER_QUALITY_AGENT_BEGIN = re.compile(r"^quality-agent$")
LEXER_QUALITY_AGENT_LINE = LEXER_BLOCK_LINE
LEXER_QUALITY_AGENT_END = LEXER_BLOCK_END

# Statements:
LEXER_EXPORT = re.compile(r"^export\s+([A-Za-z0-9_\-]+)\s*(\+)?=\s*(.+)?$")
LEXER_EXPORT2 = re.compile(r"^export\s+([A-Za-z0-9_\-]+)$")
LEXER_UNEXPORT = re.compile(r"^unexport\s+([A-Za-z0-9_\-]+)$")
LEXER_INCLUDE = re.compile(r"^include\s+(.+)$")

LEXER_VARIABLE = re.compile(r"\%\{([A-Za-z0-9_\-]+)\}")
LEXER_SHELL = re.compile(r"\%\(.*\)")

LEXER_IF_IF = re.compile(r"^if\s+(.*)\s+(==|!=)\s+(.*)\s*")
LEXER_IF_ELIF = re.compile(r"^elif\s+(.*)\s*(==|!=)\s*(.*)\s*")
LEXER_IF_ELSE = re.compile(r"^else")
LEXER_IF_LINE = LEXER_BLOCK_LINE
LEXER_IF_END = LEXER_BLOCK_END

class Lexer(object):
	def __init__(self, lines=[], parent=None, environ=None):
		self.lines = lines
		self.parent = parent

		self._lineno = 0

		# A place to store all definitions.
		self._definitions = {}

		# Init function that can be overwritten by child classes.
		self.init(environ)

		# Run the parser.
		self.run()

	def inherit(self, other):
		"""
			Inherit everything from other lexer.
		"""
		self._definitions.update(other._definitions)

	@property
	def definitions(self):
		return self._definitions

	@classmethod
	def open(cls, filename, *args, **kwargs):
		f = open(filename)
		lines = f.readlines()
		f.close()

		return cls(lines, *args, **kwargs)

	@property
	def lineno(self):
		return self._lineno + 1

	@property
	def root(self):
		if self.parent:
			return self.parent.root

		return self

	def get_line(self, no, raw=False):
		try:
			line = self.lines[no]
		except IndexError:
			# self.lines is a list; indexing past its end raises IndexError.
			raise EndOfFileError

		# Strip newline.
		line = line.rstrip("\n")

		# DEBUG
		#print line

		if raw:
			return line

		# strip comments - caution: quotations

		if line.startswith(LEXER_COMMENT_CHAR):
			return ""

		# XXX fix removing of comments in lines
		#i = -1
		#length = len(line)
		#quote = None

		#for i in range(length):
		#	s = line[i]

		#	if s in LEXER_QUOTES:
		#		if quote == s:
		#			quote = None
		#		else:
		#			quote = s

		#	if s == LEXER_COMMENT_CHAR:
		#		return line[:i+1]

		return line

	def line_is_empty(self):
		line = self.get_line(self._lineno)

		m = re.match(LEXER_EMPTY_LINE, line)
		if m:
			return True

		return False

	def expand_string(self, s):
		if s is None:
			return ""

		while s:
			m = re.search(LEXER_VARIABLE, s)
			if not m:
				break

			var = m.group(1)
			s = s.replace("%%{%s}" % var, self.get_var(var))

		return s

	def get_var(self, key, default=None, raw=False):
		definitions = {}
		definitions.update(self.root.definitions)
		if self.parent:
			definitions.update(self.parent.definitions)
		definitions.update(self.definitions)

		val = None
		try:
			val = definitions[key]
		except KeyError:
			pass

		if val is None:
			val = default

		if raw:
			return val

		return self.expand_string(val)

	def init(self, environ):
		pass

	def get_default_parsers(self):
		return [
			(LEXER_COMMENT, self.parse_comment),
			(LEXER_DEFINITION, self.parse_definition),
			(LEXER_DEFINE_BEGIN, self.parse_define),
			(LEXER_IF_IF, self.parse_if),
		]

	def get_parsers(self):
		return []

	def parse_line(self):
		# Skip empty lines.
		if self.line_is_empty():
			self._lineno += 1
			return

		line = self.get_line(self._lineno)

		parsers = self.get_parsers() + self.get_default_parsers()

		found = False
		for pattern, func in parsers:
			m = re.match(pattern, line)
			if m:
				# Hey, I found a match, we parse it with the subparser function.
				found = True
				func()

				break

		if not found:
			raise LexerUnhandledLine, "%d: %s" % (self.lineno, line)

	def read_block(self, pattern_start=None, pattern_line=None, pattern_end=None,
			raw=False):
		assert pattern_start
		assert pattern_line
		assert pattern_end

		line = self.get_line(self._lineno)

		m = re.match(pattern_start, line)
		if not m:
			raise LexerError

		# Go in to next line.
		self._lineno += 1

		groups = m.groups()

		lines = []
		while True:
			line = self.get_line(self._lineno, raw=raw)

			m = re.match(pattern_end, line)
			if m:
				self._lineno += 1
				break

			m = re.match(pattern_line, line)
			if m:
				lines.append(m.group(1))
				self._lineno += 1
				continue

			m = re.match(LEXER_EMPTY_LINE, line)
			if m:
				lines.append("")
				self._lineno += 1
				continue

			if not line.startswith(LEXER_BLOCK_LINE_INDENT):
				raise LexerError, "Line does not have the right indentation: %d: %s" \
					% (self.lineno, line)

			raise LexerUnhandledLine, "%d: %s" % (self.lineno, line)

		return (groups, lines)

	def run(self):
		while self._lineno < len(self.lines):
			self.parse_line()

	def parse_comment(self):
		line = self.get_line(self._lineno)

		if not line:
			return

		raise LexerUnhandledLine, "%d: %s" % (self.lineno, line)

	def parse_definition(self, pattern=LEXER_DEFINITION):
		line = self.get_line(self._lineno)

		m = re.match(pattern, line)
		if not m:
			raise LexerError, "Not a definition: %s" % line

		# Line was correctly parsed, can go on.
		self._lineno += 1

		k, o, v = m.groups()

		if o == "+":
			prev = self.get_var(k, default=None, raw=True)
			if prev:
				v = " ".join((prev or "", v))

		# Handle backslash.
		while v and v.endswith("\\"):
			line = self.get_line(self._lineno)
			self._lineno += 1

			v = v[:-1] + line

		self._definitions[k] = v

		return k, v

	def parse_define(self):
		line = self.get_line(self._lineno)

		m = re.match(LEXER_DEFINE_BEGIN, line)
		if not m:
			raise Exception, "XXX not a define"

		# Check content of next line.
		found = None
		i = 1
		while True:
			line = self.get_line(self._lineno + i)

			# Skip empty lines.
			empty = re.match(LEXER_EMPTY_LINE, line)
			if empty:
				i += 1
				continue

			for pattern in (LEXER_DEFINE_LINE, LEXER_DEFINE_END):
				found = re.match(pattern, line)
				if found:
					break

			if found:
				break

			if found is None:
				line = self.get_line(self._lineno)
				raise LexerUnhandledLine, "%d: %s" % (self.lineno, line)

		# Go in to next line.
		self._lineno += 1

		key = m.group(2)
		assert key

		value = []
		while True:
			line = self.get_line(self._lineno)

			m = re.match(LEXER_DEFINE_END, line)
			if m:
				self._lineno += 1
				break

			m = re.match(LEXER_DEFINE_LINE, line)
			if m:
				self._lineno += 1
				value.append(m.group(1))
				continue

			m = re.match(LEXER_EMPTY_LINE, line)
			if m:
				self._lineno += 1
				value.append("")
				continue

			raise LexerError, "Unhandled line: %s" % line

		self._definitions[key] = "\n".join(value)

	def _parse_if_block(self, first=True):
		line = self.get_line(self._lineno)

		found = False

		if first:
			m = re.match(LEXER_IF_IF, line)
			if m:
				found = True
		else:
			m = re.match(LEXER_IF_ELIF, line)
			if m:
				found = True
			else:
				m = re.match(LEXER_IF_ELSE, line)
				if m:
					found = True

		if not found:
			raise LexerError, "No valid begin of if statement: %d: %s" \
				% (self.lineno, line)

		self._lineno += 1
		clause = m.groups()
		lines = []

		block_closed = False
		while len(self.lines) >= self._lineno:
			line = self.get_line(self._lineno)

			for pattern in (LEXER_IF_END, LEXER_IF_ELIF, LEXER_IF_ELSE):
				m = re.match(pattern, line)
				if m:
					block_closed = True
					break

			if block_closed:
				break

			m = re.match(LEXER_IF_LINE, line)
			if m:
				self._lineno += 1
				lines.append("%s" % m.groups())
				continue

			raise LexerUnhandledLine, "%d: %s" % (self.lineno, line)

		if not block_closed:
			raise LexerError, "Unclosed if block"

		return (clause, lines)

	def parse_if(self):
		blocks = []
		blocks.append(self._parse_if_block(first=True))

		while len(self.lines) >= self._lineno:
			line = self.get_line(self._lineno)

			found = False
			for pattern in (LEXER_IF_ELIF, LEXER_IF_ELSE,):
				m = re.match(pattern, line)
				if m:
					# Found another block.
					found = True
					blocks.append(self._parse_if_block(first=False))
					break

			if not found:
				break

		# Check for end.
		line = self.get_line(self._lineno)
		m = re.match(LEXER_IF_END, line)
		if not m:
			raise LexerError, "Unclosed if clause"

		self._lineno += 1

		# Read in all blocks, now we evaluate each clause.
		for clause, lines in blocks:
			val = False

			if len(clause) == 3:
				a, op, b = clause

				# Remove leading and trailing "s and 's.
				a = a.lstrip("\"'").rstrip("\"'")
				b = b.lstrip("\"'").rstrip("\"'")

				a = self.expand_string(a)
				b = self.expand_string(b)

				if op == "==":
					val = a == b
				elif op == "!=":
					val = not a == b
				else:
					raise LexerError, "Unknown operator: %s" % op

			else:
				# Else is always true.
				val = True

			# If any clause is true, we can parse the block.
			if val:
				lexer = self.__class__(lines, parent=self)
				self.inherit(lexer)
				break


class DefaultLexer(Lexer):
	"""
		A lexer which only knows about simple definitions.
	"""
	pass


class QualityAgentLexer(DefaultLexer):
	"""
		A lexer to read quality agent exceptions.
	"""
	@property
	def exports(self):
		exports = {}

		# Check if we permit binaries that are not full RELRO.
		if self.get_var("permit_not_full_relro"):
			exports["QUALITY_AGENT_PERMIT_NOT_FULL_RELRO"] = \
				self.get_var("permit_not_full_relro")

		# Check if we permit $ORIGIN in rpath.
		if self.get_var("rpath_allow_origin"):
			exports["QUALITY_AGENT_RPATH_ALLOW_ORIGIN"] = \
				self.get_var("rpath_allow_origin")

		# Load execstack whitelist.
		if self.get_var("whitelist_execstack"):
			exports["QUALITY_AGENT_WHITELIST_EXECSTACK"] = \
				self.get_var("whitelist_execstack")

		# Load nx whitelist.
		if self.get_var("whitelist_nx"):
			exports["QUALITY_AGENT_WHITELIST_NX"] = \
				self.get_var("whitelist_nx")

		# Load rpath whitelist.
		if self.get_var("whitelist_rpath"):
			exports["QUALITY_AGENT_WHITELIST_RPATH"] = \
				self.get_var("whitelist_rpath")

		# Load symlink whitelist.
		if self.get_var("whitelist_symlink"):
			exports["QUALITY_AGENT_WHITELIST_SYMLINK"] = \
				self.get_var("whitelist_symlink")

		return exports


class TemplateLexer(DefaultLexer):
	def init(self, environ):
		# A place to store the scriptlets.
		self.scriptlets = {}

	@property
	def definitions(self):
		definitions = {}

		assert self.parent
		definitions.update(self.parent.definitions)
		definitions.update(self._definitions)

		return definitions

	def get_parsers(self):
		return [
			(LEXER_SCRIPTLET_BEGIN, self.parse_scriptlet),
		]

	def parse_scriptlet(self):
		line = self.get_line(self._lineno)

		m = re.match(LEXER_SCRIPTLET_BEGIN, line)
		if not m:
			raise Exception, "Not a scriptlet"

		self._lineno += 1

		name = m.group(1)

		# check if scriptlet was already defined.
		if name in self.scriptlets:
			raise Exception, "Scriptlet %s is already defined" % name

		path = m.group(2)
		if path:
			self.scriptlets[name] = {
				"lang" : "bin",
				"path" : self.expand_string(path),
			}
			return

		lines = []
		while True:
			line = self.get_line(self._lineno, raw=True)

			m = re.match(LEXER_SCRIPTLET_END, line)
			if m:
				self._lineno += 1
				break

			m = re.match(LEXER_SCRIPTLET_LINE, line)
			if m:
				lines.append(m.group(1))
				self._lineno += 1
				continue

			m = re.match(LEXER_EMPTY_LINE, line)
			if m:
				lines.append("")
				self._lineno += 1
				continue

			raise LexerUnhandledLine, "%d: %s" % (self.lineno, line)

		self.scriptlets[name] = {
			"lang" : "shell",
			"scriptlet" : "\n".join(lines),
		}


class PackageLexer(TemplateLexer):
	def init(self, environ):
		TemplateLexer.init(self, environ)

		self._template = "MAIN"

		assert isinstance(self.parent, PackagesLexer) or \
			isinstance(self.parent, PackageLexer), self.parent

	@property
	def definitions(self):
		definitions = {}

		if self.template:
			definitions.update(self.template.definitions)

		definitions.update(self._definitions)

		return definitions

	@property
	def template(self):
		if not self._template:
			return None

		# Get template from parent.
		try:
			return self.root.templates[self._template]
		except KeyError:
			raise LexerError, "Template does not exist: %s" % self._template

	def get_parsers(self):
		parsers = [
			(LEXER_PACKAGE_INHERIT, self.parse_inherit),
		] + TemplateLexer.get_parsers(self)

		return parsers

	def parse_inherit(self):
		line = self.get_line(self._lineno)

		m = re.match(LEXER_PACKAGE_INHERIT, line)
		if not m:
			raise LexerError, "Not a template inheritance: %s" % line

		self._lineno += 1

		self._template = m.group(1)

		# Check if template exists.
		assert self.template


class ExportLexer(DefaultLexer):
	@property
	def exports(self):
		if not hasattr(self.parent, "exports"):
			return self._exports

		exports = []
		for export in self._exports + self.parent.exports:
			exports.append(export)

		return exports

	def init(self, environ):
		# A list of variables that should be exported in the build
		# environment.
		self._exports = []
		self._unexports = []

	def get_parsers(self):
		return [
			(LEXER_EXPORT, self.parse_export),
			(LEXER_EXPORT2, self.parse_export2),
			(LEXER_UNEXPORT, self.parse_unexport),
		]

	def inherit(self, other):
		DefaultLexer.inherit(self, other)

		# Try to remove all unexported variables.
		for unexport in other._unexports:
			try:
				self._exports.remove(unexport)
			except ValueError:
				# The variable was never exported here.
				pass

		for export in other._exports:
			if not export in self._exports:
				self._exports.append(export)

	def parse_export(self):
		k, v = self.parse_definition(pattern=LEXER_EXPORT)

		if k and not k in self.exports:
			self._exports.append(k)

	def parse_export2(self):
		line = self.get_line(self._lineno)
		self._lineno += 1

		m = re.match(LEXER_EXPORT2, line)
		if m:
			k = m.group(1)
			# Export the variable only if it is not exported, yet.
			if k and not k in self.exports:
				self._exports.append(k)

	def parse_unexport(self):
		line = self.get_line(self._lineno)
		self._lineno += 1

		m = re.match(LEXER_UNEXPORT, line)
		if m:
			k = m.group(1)
			if k and k in self.exports:
				self._exports.remove(k)
				self._unexports.append(k)


class BuildLexer(ExportLexer):
	@property
	def stages(self):
		return self.definitions


class RootLexer(ExportLexer):
	def init(self, environ):
		ExportLexer.init(self, environ)

		# A place to store all packages and templates.
		self.packages = PackagesLexer([], parent=self)

		# Import all environment variables.
		if environ:
			for k, v in environ.items():
				self._definitions[k] = v

				self.exports.append(k)

		# Place for build instructions
		self.build = BuildLexer([], parent=self)

		# Place for quality-agent exceptions
		self.quality_agent = QualityAgentLexer([], parent=self)

		# Include all macros.
		if not self.parent:
			for macro in MACRO_FILES:
				self.include(macro)

	def include(self, file):
		# Create a new lexer, and parse the whole file.
		include = RootLexer.open(file, parent=self)

		# Copy all data from the included file.
		self.inherit(include)

	def inherit(self, other):
		"""
			Inherit everything from other lexer.
		"""
		ExportLexer.inherit(self, other)

		self._definitions.update(other._definitions)

		self.build.inherit(other.build)
		self.packages.inherit(other.packages)
		self.quality_agent.inherit(other.quality_agent)

	@property
	def templates(self):
		return self.packages.templates

	def get_parsers(self):
		parsers = ExportLexer.get_parsers(self)
		parsers += [
			(LEXER_INCLUDE, self.parse_include),
			(LEXER_PACKAGES_BEGIN, self.parse_packages),
			(LEXER_BUILD_BEGIN, self.parse_build),
			(LEXER_QUALITY_AGENT_BEGIN, self.parse_quality_agent),
		]

		return parsers

	def parse_build(self):
		line = self.get_line(self._lineno)

		m = re.match(LEXER_BUILD_BEGIN, line)
		if not m:
			raise LexerError, "Not a build statement: %s" % line

		self._lineno += 1

		lines = []

		while True:
			line = self.get_line(self._lineno)

			m = re.match(LEXER_BUILD_END, line)
			if m:
				self._lineno += 1
				break

			m = re.match(LEXER_BUILD_LINE, line)
			if m:
				lines.append(m.group(1))
				self._lineno += 1
				# Don't fall through to the empty-line check below.
				continue

			# Accept empty lines.
			m = re.match(LEXER_EMPTY_LINE, line)
			if m:
				lines.append(line)
				self._lineno += 1
				continue

		build = BuildLexer(lines, parent=self.build)
		self.build.inherit(build)

	def parse_include(self):
		line = self.get_line(self._lineno)

		m = re.match(LEXER_INCLUDE, line)
		if not m:
			raise LexerError, "Not an include statement: %s" % line

		# Get the filename from the line.
		file = m.group(1)
		file = self.expand_string(file)

		# Include the content of the file.
		self.include(file)

		# Go on to next line.
		self._lineno += 1

	def parse_packages(self):
		keys, lines = self.read_block(
			pattern_start=LEXER_PACKAGES_BEGIN,
			pattern_line=LEXER_PACKAGES_LINE,
			pattern_end=LEXER_PACKAGES_END,
			raw=True,
		)

		pkgs = PackagesLexer(lines, parent=self.packages)
		self.packages.inherit(pkgs)

	def parse_quality_agent(self):
		keys, lines = self.read_block(
			pattern_start=LEXER_QUALITY_AGENT_BEGIN,
			pattern_line=LEXER_QUALITY_AGENT_LINE,
			pattern_end=LEXER_QUALITY_AGENT_END,
			raw=True,
		)

		qa = QualityAgentLexer(lines, parent=self.quality_agent)
		self.quality_agent.inherit(qa)


class PackagesLexer(DefaultLexer):
	def init(self, environ):
		# A place to store all templates.
		self.templates = {}

		# A place to store all packages.
		self.packages = []

	def inherit(self, other):
		# Copy all templates and packages but make sure
		# to update the parent lexer (for accessing each other).
		for name, template in other.templates.items():
			template.parent = self
			self.templates[name] = template

		for pkg in other.packages:
			pkg.parent = self
			self.packages.append(pkg)

	def __iter__(self):
		return iter(self.packages)

	def get_parsers(self):
		return [
			(LEXER_TEMPLATE_BEGIN, self.parse_template),
			(LEXER_PACKAGE_BEGIN, self.parse_package),
		]

	def parse_template(self):
		line = self.get_line(self._lineno)

		m = re.match(LEXER_TEMPLATE_BEGIN, line)
		if not m:
			raise Exception, "Not a template"

		# Line was correctly parsed, can go on.
		self._lineno += 1

		name = m.group(1)
		lines = []

		while True:
			line = self.get_line(self._lineno)

			m = re.match(LEXER_TEMPLATE_END, line)
			if m:
				self._lineno += 1
				break

			m = re.match(LEXER_TEMPLATE_LINE, line)
			if m:
				lines.append(m.group(1))
				self._lineno += 1
				# Don't fall through to the empty-line check below.
				continue

			# Accept empty lines.
			m = re.match(LEXER_EMPTY_LINE, line)
			if m:
				lines.append(line)
				self._lineno += 1
				continue

		template = TemplateLexer(lines, parent=self)
		self.templates[name] = template

	def parse_package(self):
		line = self.get_line(self._lineno)

		m = re.match(LEXER_PACKAGE_BEGIN, line)
		if not m:
			raise Exception, "Not a package: %s" % line

		self._lineno += 1

		name = m.group(1)
		name = self.expand_string(name)

		m = re.match(LEXER_VALID_PACKAGE_NAME, name)
		if not m:
			raise LexerError, "Invalid package name: %s" % name

		lines = ["_name = %s" % name]

		opened = False
		while len(self.lines) > self._lineno:
			line = self.get_line(self._lineno)

			m = re.match(LEXER_PACKAGE_END, line)
			if m:
				opened = False
				self._lineno += 1
				break

			m = re.match(LEXER_PACKAGE_LINE, line)
			if m:
				self._lineno += 1
				lines.append(m.group(1))
				opened = True
				continue

			# Accept empty lines.
			m = re.match(LEXER_EMPTY_LINE, line)
			if m:
				self._lineno += 1
				lines.append(line)
				continue

			# If there is an unhandled line in a block, we raise an error.
			if opened:
				raise Exception, "XXX unhandled line in package block: %s" % line

			# If the block was never opened, we just go on.
			else:
				break

		if opened:
			raise LexerError, "Unclosed package block '%s'." % name

		package = PackageLexer(lines, parent=self)
		self.packages.append(package)


class FileLexer(DefaultLexer):
	def init(self, environ):
		self.build = DefaultLexer()
		self.deps = DefaultLexer()
		self.distro = DefaultLexer()
		self.package = DefaultLexer()

	def get_parsers(self):
		return [
			(LEXER_BUILD_BEGIN, self.parse_build),
			(LEXER_DISTRO_BEGIN, self.parse_distro),
			(LEXER_PACKAGE2_BEGIN, self.parse_package),
			(LEXER_DEPS_BEGIN, self.parse_deps),
		]

	def parse_build(self):
		keys, lines = self.read_block(
			pattern_start=LEXER_BUILD_BEGIN,
			pattern_line=LEXER_BUILD_LINE,
			pattern_end=LEXER_BUILD_END,
			raw=True,
		)

		build = DefaultLexer(lines)
		self.build.inherit(build)

	def parse_distro(self):
		keys, lines = self.read_block(
			pattern_start=LEXER_DISTRO_BEGIN,
			pattern_line=LEXER_DISTRO_LINE,
			pattern_end=LEXER_DISTRO_END,
			raw=True,
		)

		distro = DefaultLexer(lines)
		self.distro.inherit(distro)

	def parse_package(self):
		keys, lines = self.read_block(
			pattern_start=LEXER_PACKAGE2_BEGIN,
			pattern_line=LEXER_PACKAGE2_LINE,
			pattern_end=LEXER_PACKAGE2_END,
			raw=True,
		)

		pkg = DefaultLexer(lines)
		self.package.inherit(pkg)

	def parse_deps(self):
		keys, lines = self.read_block(
			pattern_start=LEXER_DEPS_BEGIN,
			pattern_line=LEXER_DEPS_LINE,
			pattern_end=LEXER_DEPS_END,
			raw=True,
		)

		deps = DefaultLexer(lines)
		self.deps.inherit(deps)
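

# The block below is not part of the original module. It is a minimal usage
# sketch showing how definition parsing and %{variable} expansion fit together:
# the input lines and the variable names ("name", "version", "release") are
# made up for illustration only.
if __name__ == "__main__":
	example = [
		"name = foo\n",
		"version = 1.0\n",
		"release = %{name}-%{version}\n",
	]

	# DefaultLexer parses its lines on construction (Lexer.__init__ calls run()).
	lexer = DefaultLexer(example)

	# get_var() expands %{...} references recursively; this prints "foo-1.0".
	print lexer.get_var("release")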