makedocbook: Use sys.exit()

Use sys.exit() to write a message to stderr and terminate with a
non-zero exit code.
This commit is contained in:
Jon Turney 2022-11-01 11:21:21 +00:00
parent 2432d77099
commit 8b6c4249e2
No known key found for this signature in database
GPG Key ID: C7C86F0370285C81
1 changed file with 6 additions and 10 deletions

View File

@@ -214,8 +214,7 @@ def function(c, l):
     # FUNCTION implies starting a new refentry
     if refentry is not None:
-        print("multiple FUNCTIONs without NEWPAGE", file=sys.stderr)
-        exit(1)
+        sys.exit("multiple FUNCTIONs without NEWPAGE")

     # create the refentry
     refentry = lxml.etree.SubElement(rootelement, 'refentry')
@@ -308,17 +307,15 @@ def synopsis(c, t):
             # a prototype without a terminating ';' is an error
             if s.endswith(')'):
-                print("'%s' missing terminating semicolon" % l, file=sys.stderr)
+                sys.exit("'%s' missing terminating semicolon" % l)
                 s = s + ';'
-                exit(1)

         if ';' in s:
             synopsis_for_prototype(funcsynopsis, s)
             s = ''

     if s.strip():
-        print("surplus synopsis '%s'" % s, file=sys.stderr)
-        exit(1)
+        sys.exit("surplus synopsis '%s'" % s)

 def synopsis_for_prototype(funcsynopsis, s):
     s = s.strip()
@@ -591,8 +588,7 @@ def t_eof(t):

 # Error handling rule
 def t_error(t):
-    print("tokenization error, remaining text '%s'" % t.value, file=sys.stderr)
-    exit(1)
+    sys.exit("tokenization error, remaining text '%s'" % t.value)

 lexer = lex.lex()
@@ -795,8 +791,8 @@ def p_multitable(p):
     parser_verbose(p)

 def p_error(t):
-    print('parse error at line %d, token %s, next token %s' % (t.lineno, t, parser.token()), file=sys.stderr)
-    exit(1)
+    sys.exit('parse error at line %d, token %s, next token %s' % (t.lineno, t, parser.token()))

 # protect creating the parser with a lockfile, so that when multiple processes
 # are running this script simultaneously, we don't get one of them generating a