parser and virtualmachine seem to be functional again in python 3
--HG-- branch : mung
This commit is contained in:
parent
25db8c81e0
commit
caf91bc0a8
11 changed files with 1438 additions and 1438 deletions
2
bi.py
2
bi.py
|
@ -102,7 +102,7 @@ builtin_map.update({
|
|||
"""
|
||||
|
||||
builtin_map.update({
|
||||
'serverlog': bi.serverlog,
|
||||
b'serverlog': bi.serverlog,
|
||||
})
|
||||
|
||||
"""
|
||||
|
|
1486
bytecode.py
1486
bytecode.py
File diff suppressed because it is too large
Load diff
|
@ -134,7 +134,7 @@ class Database(object):
|
|||
i += 1
|
||||
break
|
||||
|
||||
def get_obj(self, objref):
|
||||
def get_obj(self, objnum):
|
||||
i = objnum
|
||||
if i < len(self.objects):
|
||||
self.objects[i:] = []
|
||||
|
@ -198,4 +198,4 @@ class Database(object):
|
|||
return None
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
298
ebnf.py
298
ebnf.py
|
@ -1,149 +1,149 @@
|
|||
# This module tries to implement ISO 14977 standard with pyparsing.
|
||||
# pyparsing version 1.1 or greater is required.
|
||||
|
||||
# ISO 14977 standardize The Extended Backus-Naur Form(EBNF) syntax.
|
||||
# You can read a final draft version here:
|
||||
# http://www.cl.cam.ac.uk/~mgk25/iso-ebnf.html
|
||||
|
||||
|
||||
from pyparsing import *
|
||||
|
||||
|
||||
all_names = '''
|
||||
integer
|
||||
meta_identifier
|
||||
terminal_string
|
||||
optional_sequence
|
||||
repeated_sequence
|
||||
grouped_sequence
|
||||
syntactic_primary
|
||||
syntactic_factor
|
||||
syntactic_term
|
||||
single_definition
|
||||
definitions_list
|
||||
syntax_rule
|
||||
syntax
|
||||
'''.split()
|
||||
|
||||
|
||||
integer = Word(nums)
|
||||
meta_identifier = Word(alphas, alphanums + '_')
|
||||
terminal_string = Suppress("'") + CharsNotIn("'") + Suppress("'") ^ \
|
||||
Suppress('"') + CharsNotIn('"') + Suppress('"')
|
||||
definitions_list = Forward()
|
||||
optional_sequence = Suppress('[') + definitions_list + Suppress(']')
|
||||
repeated_sequence = Suppress('{') + definitions_list + Suppress('}')
|
||||
grouped_sequence = Suppress('(') + definitions_list + Suppress(')')
|
||||
syntactic_primary = optional_sequence ^ repeated_sequence ^ \
|
||||
grouped_sequence ^ meta_identifier ^ terminal_string
|
||||
syntactic_factor = Optional(integer + Suppress('*')) + syntactic_primary
|
||||
syntactic_term = syntactic_factor + Optional(Suppress('-') + syntactic_factor)
|
||||
single_definition = delimitedList(syntactic_term, ',')
|
||||
definitions_list << delimitedList(single_definition, '|')
|
||||
syntax_rule = meta_identifier + Suppress('=') + definitions_list + \
|
||||
Suppress(';')
|
||||
|
||||
ebnfComment = ( "(*" +
|
||||
ZeroOrMore( CharsNotIn("*") | ( "*" + ~Literal(")") ) ) +
|
||||
"*)" ).streamline().setName("ebnfComment")
|
||||
|
||||
syntax = OneOrMore(syntax_rule)
|
||||
syntax.ignore(ebnfComment)
|
||||
|
||||
|
||||
def do_integer(str, loc, toks):
|
||||
return int(toks[0])
|
||||
|
||||
def do_meta_identifier(str, loc, toks):
|
||||
if toks[0] in symbol_table:
|
||||
return symbol_table[toks[0]]
|
||||
else:
|
||||
forward_count.value += 1
|
||||
symbol_table[toks[0]] = Forward()
|
||||
return symbol_table[toks[0]]
|
||||
|
||||
def do_terminal_string(str, loc, toks):
|
||||
return Literal(toks[0])
|
||||
|
||||
def do_optional_sequence(str, loc, toks):
|
||||
return Optional(toks[0])
|
||||
|
||||
def do_repeated_sequence(str, loc, toks):
|
||||
return ZeroOrMore(toks[0])
|
||||
|
||||
def do_grouped_sequence(str, loc, toks):
|
||||
return Group(toks[0])
|
||||
|
||||
def do_syntactic_primary(str, loc, toks):
|
||||
return toks[0]
|
||||
|
||||
def do_syntactic_factor(str, loc, toks):
|
||||
if len(toks) == 2:
|
||||
# integer * syntactic_primary
|
||||
return And([toks[1]] * toks[0])
|
||||
else:
|
||||
# syntactic_primary
|
||||
return [ toks[0] ]
|
||||
|
||||
def do_syntactic_term(str, loc, toks):
|
||||
if len(toks) == 2:
|
||||
# syntactic_factor - syntactic_factor
|
||||
return NotAny(toks[1]) + toks[0]
|
||||
else:
|
||||
# syntactic_factor
|
||||
return [ toks[0] ]
|
||||
|
||||
def do_single_definition(str, loc, toks):
|
||||
toks = toks.asList()
|
||||
if len(toks) > 1:
|
||||
# syntactic_term , syntactic_term , ...
|
||||
return And(toks)
|
||||
else:
|
||||
# syntactic_term
|
||||
return [ toks[0] ]
|
||||
|
||||
def do_definitions_list(str, loc, toks):
|
||||
toks = toks.asList()
|
||||
if len(toks) > 1:
|
||||
# single_definition | single_definition | ...
|
||||
return Or(toks)
|
||||
else:
|
||||
# single_definition
|
||||
return [ toks[0] ]
|
||||
|
||||
def do_syntax_rule(str, loc, toks):
|
||||
# meta_identifier = definitions_list ;
|
||||
assert toks[0].expr is None, "Duplicate definition"
|
||||
forward_count.value -= 1
|
||||
toks[0] << toks[1]
|
||||
return [ toks[0] ]
|
||||
|
||||
def do_syntax(str, loc, toks):
|
||||
# syntax_rule syntax_rule ...
|
||||
return symbol_table
|
||||
|
||||
|
||||
|
||||
symbol_table = {}
|
||||
class forward_count:
|
||||
pass
|
||||
forward_count.value = 0
|
||||
for name in all_names:
|
||||
expr = vars()[name]
|
||||
action = vars()['do_' + name]
|
||||
expr.setName(name)
|
||||
expr.setParseAction(action)
|
||||
#~ expr.setDebug()
|
||||
|
||||
|
||||
def parse(ebnf, given_table={}):
|
||||
symbol_table.clear()
|
||||
symbol_table.update(given_table)
|
||||
forward_count.value = 0
|
||||
table = syntax.parseString(ebnf)[0]
|
||||
assert forward_count.value == 0, "Missing definition"
|
||||
for name in table:
|
||||
expr = table[name]
|
||||
expr.setName(name)
|
||||
#~ expr.setDebug()
|
||||
return table
|
||||
# This module tries to implement ISO 14977 standard with pyparsing.
|
||||
# pyparsing version 1.1 or greater is required.
|
||||
|
||||
# ISO 14977 standardize The Extended Backus-Naur Form(EBNF) syntax.
|
||||
# You can read a final draft version here:
|
||||
# http://www.cl.cam.ac.uk/~mgk25/iso-ebnf.html
|
||||
|
||||
|
||||
from pyparsing import *
|
||||
|
||||
|
||||
all_names = '''
|
||||
integer
|
||||
meta_identifier
|
||||
terminal_string
|
||||
optional_sequence
|
||||
repeated_sequence
|
||||
grouped_sequence
|
||||
syntactic_primary
|
||||
syntactic_factor
|
||||
syntactic_term
|
||||
single_definition
|
||||
definitions_list
|
||||
syntax_rule
|
||||
syntax
|
||||
'''.split()
|
||||
|
||||
|
||||
integer = Word(nums)
|
||||
meta_identifier = Word(alphas, alphanums + '_')
|
||||
terminal_string = Suppress("'") + CharsNotIn("'") + Suppress("'") ^ \
|
||||
Suppress('"') + CharsNotIn('"') + Suppress('"')
|
||||
definitions_list = Forward()
|
||||
optional_sequence = Suppress('[') + definitions_list + Suppress(']')
|
||||
repeated_sequence = Suppress('{') + definitions_list + Suppress('}')
|
||||
grouped_sequence = Suppress('(') + definitions_list + Suppress(')')
|
||||
syntactic_primary = optional_sequence ^ repeated_sequence ^ \
|
||||
grouped_sequence ^ meta_identifier ^ terminal_string
|
||||
syntactic_factor = Optional(integer + Suppress('*')) + syntactic_primary
|
||||
syntactic_term = syntactic_factor + Optional(Suppress('-') + syntactic_factor)
|
||||
single_definition = delimitedList(syntactic_term, ',')
|
||||
definitions_list << delimitedList(single_definition, '|')
|
||||
syntax_rule = meta_identifier + Suppress('=') + definitions_list + \
|
||||
Suppress(';')
|
||||
|
||||
ebnfComment = ( "(*" +
|
||||
ZeroOrMore( CharsNotIn("*") | ( "*" + ~Literal(")") ) ) +
|
||||
"*)" ).streamline().setName("ebnfComment")
|
||||
|
||||
syntax = OneOrMore(syntax_rule)
|
||||
syntax.ignore(ebnfComment)
|
||||
|
||||
|
||||
def do_integer(str, loc, toks):
|
||||
return int(toks[0])
|
||||
|
||||
def do_meta_identifier(str, loc, toks):
|
||||
if toks[0] in symbol_table:
|
||||
return symbol_table[toks[0]]
|
||||
else:
|
||||
forward_count.value += 1
|
||||
symbol_table[toks[0]] = Forward()
|
||||
return symbol_table[toks[0]]
|
||||
|
||||
def do_terminal_string(str, loc, toks):
|
||||
return Literal(toks[0])
|
||||
|
||||
def do_optional_sequence(str, loc, toks):
|
||||
return Optional(toks[0])
|
||||
|
||||
def do_repeated_sequence(str, loc, toks):
|
||||
return ZeroOrMore(toks[0])
|
||||
|
||||
def do_grouped_sequence(str, loc, toks):
|
||||
return Group(toks[0])
|
||||
|
||||
def do_syntactic_primary(str, loc, toks):
|
||||
return toks[0]
|
||||
|
||||
def do_syntactic_factor(str, loc, toks):
|
||||
if len(toks) == 2:
|
||||
# integer * syntactic_primary
|
||||
return And([toks[1]] * toks[0])
|
||||
else:
|
||||
# syntactic_primary
|
||||
return [ toks[0] ]
|
||||
|
||||
def do_syntactic_term(str, loc, toks):
|
||||
if len(toks) == 2:
|
||||
# syntactic_factor - syntactic_factor
|
||||
return NotAny(toks[1]) + toks[0]
|
||||
else:
|
||||
# syntactic_factor
|
||||
return [ toks[0] ]
|
||||
|
||||
def do_single_definition(str, loc, toks):
|
||||
toks = toks.asList()
|
||||
if len(toks) > 1:
|
||||
# syntactic_term , syntactic_term , ...
|
||||
return And(toks)
|
||||
else:
|
||||
# syntactic_term
|
||||
return [ toks[0] ]
|
||||
|
||||
def do_definitions_list(str, loc, toks):
|
||||
toks = toks.asList()
|
||||
if len(toks) > 1:
|
||||
# single_definition | single_definition | ...
|
||||
return Or(toks)
|
||||
else:
|
||||
# single_definition
|
||||
return [ toks[0] ]
|
||||
|
||||
def do_syntax_rule(str, loc, toks):
|
||||
# meta_identifier = definitions_list ;
|
||||
assert toks[0].expr is None, "Duplicate definition"
|
||||
forward_count.value -= 1
|
||||
toks[0] << toks[1]
|
||||
return [ toks[0] ]
|
||||
|
||||
def do_syntax(str, loc, toks):
|
||||
# syntax_rule syntax_rule ...
|
||||
return symbol_table
|
||||
|
||||
|
||||
|
||||
symbol_table = {}
|
||||
class forward_count:
|
||||
pass
|
||||
forward_count.value = 0
|
||||
for name in all_names:
|
||||
expr = vars()[name]
|
||||
action = vars()['do_' + name]
|
||||
expr.setName(name)
|
||||
expr.setParseAction(action)
|
||||
#~ expr.setDebug()
|
||||
|
||||
|
||||
def parse(ebnf, given_table={}):
|
||||
symbol_table.clear()
|
||||
symbol_table.update(given_table)
|
||||
forward_count.value = 0
|
||||
table = syntax.parseString(ebnf)[0]
|
||||
assert forward_count.value == 0, "Missing definition"
|
||||
for name in table:
|
||||
expr = table[name]
|
||||
expr.setName(name)
|
||||
#~ expr.setDebug()
|
||||
return table
|
||||
|
|
182
errors.py
182
errors.py
|
@ -1,92 +1,92 @@
|
|||
from pyenum import pyenum
|
||||
|
||||
enum = pyenum()
|
||||
enum.E_NONE = 0
|
||||
enum.E_PERM = 1
|
||||
enum.E_PROPNF = 2
|
||||
enum.E_FUNCNF = 3
|
||||
enum.E_FILENF = 4
|
||||
enum.E_VARNF = 5
|
||||
enum.E_INVARG = 6
|
||||
enum.E_TICKS = 7
|
||||
enum.E_SECONDS = 8
|
||||
enum.E_MEMORY = 9
|
||||
enum.E_IOERR = 10
|
||||
enum.E_TYPE = 11
|
||||
enum.E_ARGS = 12
|
||||
enum.E_FLOAT = 13
|
||||
enum.E_DIV = 14
|
||||
enum.E_SYNTAX = 15
|
||||
enum.E_UNICODE = 16
|
||||
enum.E_MAXREC = 17
|
||||
enum.E_PARSE = 18
|
||||
enum.E_RANGE = 19
|
||||
enum.E_INVIND = 20
|
||||
enum.E_RECMOVE = 21
|
||||
enum.E_NACC = 22
|
||||
enum.E_INVOBJ = 23
|
||||
enum.E_CONN = 24
|
||||
|
||||
enum.E_USER = 200
|
||||
enum.E_USER1 = 201
|
||||
enum.E_USER2 = 202
|
||||
enum.E_USER3 = 203
|
||||
enum.E_USER4 = 204
|
||||
enum.E_USER5 = 205
|
||||
enum.E_USER6 = 206
|
||||
enum.E_USER7 = 207
|
||||
enum.E_USER8 = 208
|
||||
enum.E_USER9 = 209
|
||||
enum.E_USER10 = 210
|
||||
|
||||
msgs = {
|
||||
enum.E_NONE: "No error",
|
||||
enum.E_PERM: "Permission denied",
|
||||
enum.E_PROPNF: "Property not found",
|
||||
enum.E_FUNCNF: "Function not found",
|
||||
enum.E_FILENF: "File not found",
|
||||
enum.E_VARNF: "Variable not found",
|
||||
enum.E_INVARG: "Invalid argument",
|
||||
enum.E_TICKS: "Out of ticks",
|
||||
enum.E_SECONDS: "Out of seconds",
|
||||
enum.E_MEMORY: "Out of memory",
|
||||
enum.E_IOERR: "I/O error",
|
||||
enum.E_TYPE: "Type mismatch",
|
||||
enum.E_ARGS: "Incorrect number of arguments",
|
||||
enum.E_FLOAT: "Floating point error",
|
||||
enum.E_DIV: "Division by zero",
|
||||
enum.E_SYNTAX: "Syntax error",
|
||||
enum.E_UNICODE: "Invalid unicode character",
|
||||
enum.E_MAXREC: "Maximum recursion depth reached",
|
||||
enum.E_PARSE: "Unable to parse command",
|
||||
enum.E_RANGE: "Index out of range",
|
||||
enum.E_INVIND: "Invalid indirection",
|
||||
enum.E_RECMOVE: "Recursive move",
|
||||
enum.E_NACC: "Move refused by destination",
|
||||
enum.E_INVOBJ: "Invalid object",
|
||||
enum.E_CONN: "Connection error",
|
||||
|
||||
enum.E_USER: "User-defined error",
|
||||
enum.E_USER1: "User-defined error 1",
|
||||
enum.E_USER2: "User-defined error 2",
|
||||
enum.E_USER3: "User-defined error 3",
|
||||
enum.E_USER4: "User-defined error 4",
|
||||
enum.E_USER5: "User-defined error 5",
|
||||
enum.E_USER6: "User-defined error 6",
|
||||
enum.E_USER7: "User-defined error 7",
|
||||
enum.E_USER8: "User-defined error 8",
|
||||
enum.E_USER9: "User-defined error 9",
|
||||
enum.E_USER10: "User-defined error 10",
|
||||
}
|
||||
|
||||
class VMRuntimeError(Exception):
|
||||
def __init__(self, code, msg=None):
|
||||
if msg == None and code in msgs:
|
||||
msg = msgs[code]
|
||||
elif msg == None:
|
||||
msg = "Unknown error code"
|
||||
Exception.__init__(self, msg)
|
||||
self.errorcode = code
|
||||
|
||||
|
||||
from pyenum import pyenum
|
||||
|
||||
enum = pyenum()
|
||||
enum.E_NONE = 0
|
||||
enum.E_PERM = 1
|
||||
enum.E_PROPNF = 2
|
||||
enum.E_FUNCNF = 3
|
||||
enum.E_FILENF = 4
|
||||
enum.E_VARNF = 5
|
||||
enum.E_INVARG = 6
|
||||
enum.E_TICKS = 7
|
||||
enum.E_SECONDS = 8
|
||||
enum.E_MEMORY = 9
|
||||
enum.E_IOERR = 10
|
||||
enum.E_TYPE = 11
|
||||
enum.E_ARGS = 12
|
||||
enum.E_FLOAT = 13
|
||||
enum.E_DIV = 14
|
||||
enum.E_SYNTAX = 15
|
||||
enum.E_UNICODE = 16
|
||||
enum.E_MAXREC = 17
|
||||
enum.E_PARSE = 18
|
||||
enum.E_RANGE = 19
|
||||
enum.E_INVIND = 20
|
||||
enum.E_RECMOVE = 21
|
||||
enum.E_NACC = 22
|
||||
enum.E_INVOBJ = 23
|
||||
enum.E_CONN = 24
|
||||
|
||||
enum.E_USER = 200
|
||||
enum.E_USER1 = 201
|
||||
enum.E_USER2 = 202
|
||||
enum.E_USER3 = 203
|
||||
enum.E_USER4 = 204
|
||||
enum.E_USER5 = 205
|
||||
enum.E_USER6 = 206
|
||||
enum.E_USER7 = 207
|
||||
enum.E_USER8 = 208
|
||||
enum.E_USER9 = 209
|
||||
enum.E_USER10 = 210
|
||||
|
||||
msgs = {
|
||||
enum.E_NONE: "No error",
|
||||
enum.E_PERM: "Permission denied",
|
||||
enum.E_PROPNF: "Property not found",
|
||||
enum.E_FUNCNF: "Function not found",
|
||||
enum.E_FILENF: "File not found",
|
||||
enum.E_VARNF: "Variable not found",
|
||||
enum.E_INVARG: "Invalid argument",
|
||||
enum.E_TICKS: "Out of ticks",
|
||||
enum.E_SECONDS: "Out of seconds",
|
||||
enum.E_MEMORY: "Out of memory",
|
||||
enum.E_IOERR: "I/O error",
|
||||
enum.E_TYPE: "Type mismatch",
|
||||
enum.E_ARGS: "Incorrect number of arguments",
|
||||
enum.E_FLOAT: "Floating point error",
|
||||
enum.E_DIV: "Division by zero",
|
||||
enum.E_SYNTAX: "Syntax error",
|
||||
enum.E_UNICODE: "Invalid unicode character",
|
||||
enum.E_MAXREC: "Maximum recursion depth reached",
|
||||
enum.E_PARSE: "Unable to parse command",
|
||||
enum.E_RANGE: "Index out of range",
|
||||
enum.E_INVIND: "Invalid indirection",
|
||||
enum.E_RECMOVE: "Recursive move",
|
||||
enum.E_NACC: "Move refused by destination",
|
||||
enum.E_INVOBJ: "Invalid object",
|
||||
enum.E_CONN: "Connection error",
|
||||
|
||||
enum.E_USER: "User-defined error",
|
||||
enum.E_USER1: "User-defined error 1",
|
||||
enum.E_USER2: "User-defined error 2",
|
||||
enum.E_USER3: "User-defined error 3",
|
||||
enum.E_USER4: "User-defined error 4",
|
||||
enum.E_USER5: "User-defined error 5",
|
||||
enum.E_USER6: "User-defined error 6",
|
||||
enum.E_USER7: "User-defined error 7",
|
||||
enum.E_USER8: "User-defined error 8",
|
||||
enum.E_USER9: "User-defined error 9",
|
||||
enum.E_USER10: "User-defined error 10",
|
||||
}
|
||||
|
||||
class VMRuntimeError(Exception):
|
||||
def __init__(self, code, msg=None):
|
||||
if msg == None and code in msgs:
|
||||
msg = msgs[code]
|
||||
elif msg == None:
|
||||
msg = "Unknown error code"
|
||||
Exception.__init__(self, msg)
|
||||
self.errorcode = code
|
||||
|
||||
|
||||
|
|
@ -1,75 +1,75 @@
|
|||
import sys
|
||||
import traceback
|
||||
import bisect
|
||||
from pyparsing import ParseException
|
||||
|
||||
def tokenparser(func):
|
||||
def newfunc(s, loc, tokens):
|
||||
try:
|
||||
rv = func(tokens)
|
||||
pos = lineno(s, loc)
|
||||
if isinstance(rv, VMBaseObject):
|
||||
rv.pos = pos
|
||||
return rv
|
||||
assert not False in [isinstance(x, (VMBaseObject, list)) for x in rv]
|
||||
for x in rv:
|
||||
if isinstance(x, VMBaseObject) and x.pos == None:
|
||||
x.pos = pos
|
||||
except:
|
||||
e = sys.exc_info()
|
||||
if e[0] == ParseException:
|
||||
raise
|
||||
gd = globals()
|
||||
funcobj = None
|
||||
for x in gd:
|
||||
if hasattr(gd[x], "parse") and gd[x].parse == newfunc:
|
||||
funcobj = x
|
||||
print("Error with %s.parse tokens: %s" % (funcobj, tokens))
|
||||
traceback.print_exc(e)
|
||||
raise
|
||||
|
||||
return [rv]
|
||||
|
||||
return newfunc
|
||||
|
||||
|
||||
def lineno(s, loc):
|
||||
if hash(s) in lineno.cache:
|
||||
cache = lineno.cache[hash(s)]
|
||||
else:
|
||||
cache = []
|
||||
|
||||
i = 0
|
||||
while True:
|
||||
n = s[i:].find("\n") + i
|
||||
if n < i:
|
||||
break
|
||||
cache.append(n)
|
||||
i = n + 1
|
||||
|
||||
cache.append(len(s))
|
||||
|
||||
lineno.cache[hash(s)] = cache
|
||||
|
||||
cachepos = bisect.bisect_left(cache, loc)
|
||||
line = cachepos + 1
|
||||
if cachepos == 0:
|
||||
char = loc + 1
|
||||
else:
|
||||
char = loc - cache[cachepos-1]
|
||||
|
||||
|
||||
return (line, char)
|
||||
|
||||
lineno.cache = {}
|
||||
|
||||
|
||||
class VMBaseObject(object):
|
||||
def __init__(self):
|
||||
self.pos = None
|
||||
|
||||
def bytecode(self):
|
||||
return [self]
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s>" % (self.__class__.__name__,)
|
||||
import sys
|
||||
import traceback
|
||||
import bisect
|
||||
from pyparsing import ParseException
|
||||
|
||||
def tokenparser(func):
|
||||
def newfunc(s, loc, tokens):
|
||||
try:
|
||||
rv = func(tokens)
|
||||
pos = lineno(s, loc)
|
||||
if isinstance(rv, VMBaseObject):
|
||||
rv.pos = pos
|
||||
return rv
|
||||
assert not False in [isinstance(x, (VMBaseObject, list)) for x in rv]
|
||||
for x in rv:
|
||||
if isinstance(x, VMBaseObject) and x.pos == None:
|
||||
x.pos = pos
|
||||
except:
|
||||
e = sys.exc_info()
|
||||
if e[0] == ParseException:
|
||||
raise
|
||||
gd = globals()
|
||||
funcobj = None
|
||||
for x in gd:
|
||||
if hasattr(gd[x], "parse") and gd[x].parse == newfunc:
|
||||
funcobj = x
|
||||
print("Error with %s.parse tokens: %s" % (funcobj, tokens))
|
||||
traceback.print_exc(e)
|
||||
raise
|
||||
|
||||
return [rv]
|
||||
|
||||
return newfunc
|
||||
|
||||
|
||||
def lineno(s, loc):
|
||||
if hash(s) in lineno.cache:
|
||||
cache = lineno.cache[hash(s)]
|
||||
else:
|
||||
cache = []
|
||||
|
||||
i = 0
|
||||
while True:
|
||||
n = s[i:].find("\n") + i
|
||||
if n < i:
|
||||
break
|
||||
cache.append(n)
|
||||
i = n + 1
|
||||
|
||||
cache.append(len(s))
|
||||
|
||||
lineno.cache[hash(s)] = cache
|
||||
|
||||
cachepos = bisect.bisect_left(cache, loc)
|
||||
line = cachepos + 1
|
||||
if cachepos == 0:
|
||||
char = loc + 1
|
||||
else:
|
||||
char = loc - cache[cachepos-1]
|
||||
|
||||
|
||||
return (line, char)
|
||||
|
||||
lineno.cache = {}
|
||||
|
||||
|
||||
class VMBaseObject(object):
|
||||
def __init__(self):
|
||||
self.pos = None
|
||||
|
||||
def bytecode(self):
|
||||
return [self]
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s>" % (self.__class__.__name__,)
|
||||
|
|
|
@ -1,222 +1,222 @@
|
|||
from language_tools import *
|
||||
from pyparsing import ParseException
|
||||
from bytecode import *
|
||||
|
||||
def disallow_keywords(tokens,keywords=None):
|
||||
if keywords == None:
|
||||
keywords = disallow_keywords.keywords
|
||||
else:
|
||||
keywords = set(keywords)
|
||||
|
||||
for t in tokens:
|
||||
if isinstance(t, VMIdent):
|
||||
if t.name in keywords:
|
||||
raise ParseException("Restricted keyword: %s" % (t.name,))
|
||||
elif isinstance(t, str):
|
||||
tstr = t.encode('ascii', 'ignore')
|
||||
if tstr in keywords:
|
||||
raise ParseException("Restricted keyword: %s" % (tstr,))
|
||||
elif isinstance(t, str):
|
||||
if t in keywords:
|
||||
raise ParseException("Restricted keyword: %s" % (t,))
|
||||
|
||||
disallow_keywords.keywords = set('if,elseif,else,endif,try,except,finally,endtry,while,endwhile,continue,break,for,foreach,endfor'.split(','))
|
||||
|
||||
|
||||
class ObjRef(object):
|
||||
def __init__(self, objnum):
|
||||
self.objnum = objnum
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.objnum == other.objnum
|
||||
|
||||
|
||||
|
||||
|
||||
class VMType(VMBaseObject):
|
||||
def bytecode(self):
|
||||
return [StackLiteral(self)]
|
||||
|
||||
class VMInteger(VMType):
|
||||
def __init__(self, value):
|
||||
VMType.__init__(self)
|
||||
self.value = int(value)
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
return StackLiteral(VMInteger(tokens[0]))
|
||||
|
||||
def __repr__(self):
|
||||
return "%s" % (self.value,)
|
||||
|
||||
class VMFloat(VMType):
|
||||
def __init__(self, value):
|
||||
VMType.__init__(self)
|
||||
self.value = float(value)
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
return StackLiteral(VMFloat(tokens[0]))
|
||||
|
||||
def __repr__(self):
|
||||
return "%s" % (self.value,)
|
||||
|
||||
|
||||
class VMTable(VMType):
|
||||
def __init__(self, value):
|
||||
VMType.__init__(self)
|
||||
self.value = dict(value)
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
return StackLiteral(VMTable(tokens[0]))
|
||||
|
||||
class VMList(VMType):
|
||||
def __init__(self, value):
|
||||
VMType.__init__(self)
|
||||
self.value = list(value)
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
return StackLiteral(VMList())
|
||||
|
||||
class VMTablePair(VMType):
|
||||
def __init__(self, value):
|
||||
VMType.__init__(self)
|
||||
self.key = key
|
||||
self.value = value
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
return StackLiteral(VMList())
|
||||
|
||||
class VMString(VMType):
|
||||
def __init__(self, value):
|
||||
VMType.__init__(self)
|
||||
if isinstance(value, str):
|
||||
self.value = value
|
||||
else:
|
||||
self.value = str(value, 'ascii', 'ignore')
|
||||
|
||||
def __repr__(self):
|
||||
return "\"%s\"" % (repr(self.value)[1:].strip("'").replace("\\'", "'").replace('"', '\\"'),)
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
return StackLiteral(VMString(tokens[0]))
|
||||
|
||||
class VMObjRef(VMType):
|
||||
def __init__(self, value):
|
||||
VMType.__init__(self)
|
||||
if isinstance(value, ObjRef):
|
||||
self.value = value
|
||||
elif isinstance(value, (float, int)):
|
||||
self.value = int(value)
|
||||
else:
|
||||
raise TypeError("Attempted to create VMObjRef with invalid object reference: %r" % (value,))
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
return StackLiteral(VMObjRef(int(tokens[1])))
|
||||
|
||||
def __repr__(self):
|
||||
return "#%s" % (self.value,)
|
||||
|
||||
class VMRef(VMBaseObject):
|
||||
pass
|
||||
|
||||
class VMIdent(VMRef):
|
||||
def __init__(self, name):
|
||||
VMRef.__init__(self)
|
||||
self.name = name
|
||||
|
||||
def bytecode(self):
|
||||
return [StackLiteral(str(self.name))]
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
disallow_keywords(tokens)
|
||||
return VMIdent(tokens[0])
|
||||
|
||||
def __repr__(self):
|
||||
return "<ident %s>" % (self.name,)
|
||||
|
||||
|
||||
class VMVariable(VMRef):
|
||||
def __init__(self, name):
|
||||
VMRef.__init__(self)
|
||||
self.name = name
|
||||
|
||||
def ref(self):
|
||||
return [StackLiteral(str(self.name))]
|
||||
|
||||
def bytecode(self):
|
||||
return codejoin(self.ref(), GetVariable())
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
disallow_keywords(tokens)
|
||||
return VMVariable(tokens[0])
|
||||
|
||||
def __repr__(self):
|
||||
return "<variable %s>" % (self.name,)
|
||||
|
||||
class VMFileRef(VMRef):
|
||||
def __init__(self, obj, name):
|
||||
VMRef.__init__(self)
|
||||
self.obj = obj
|
||||
self.name = name
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
assert tokens[1] == "!"
|
||||
return VMFileRef(tokens[0], tokens[2])
|
||||
#return codejoin(tokens[0], StackLiteral(tokens[2]), GetProperty())
|
||||
|
||||
def ref(self):
|
||||
return [self.obj, self.name]
|
||||
|
||||
def __repr__(self):
|
||||
return "<fileref %s!%s>" % (self.obj, self.name)
|
||||
|
||||
def bytecode(self):
|
||||
return codejoin(self.ref(), GetFile())
|
||||
|
||||
class VMPropRef(VMRef):
|
||||
def __init__(self, obj, prop):
|
||||
VMRef.__init__(self)
|
||||
self.obj = obj
|
||||
self.prop = prop
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
assert tokens[1] == "."
|
||||
return VMPropRef(tokens[0], tokens[2])
|
||||
#return codejoin(tokens[0], StackLiteral(tokens[2]), GetProperty())
|
||||
|
||||
def ref(self):
|
||||
return [self.obj, self.prop]
|
||||
|
||||
def __repr__(self):
|
||||
return "<propref %s.%s>" % (self.obj, self.prop)
|
||||
|
||||
def bytecode(self):
|
||||
return codejoin(self.ref(), GetProperty())
|
||||
|
||||
class VMCoreRef(VMPropRef):
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
return VMPropRef(VMObjRef(0), tokens[1])
|
||||
|
||||
from language_tools import *
|
||||
from pyparsing import ParseException
|
||||
from bytecode import *
|
||||
|
||||
def disallow_keywords(tokens,keywords=None):
|
||||
if keywords == None:
|
||||
keywords = disallow_keywords.keywords
|
||||
else:
|
||||
keywords = set(keywords)
|
||||
|
||||
for t in tokens:
|
||||
if isinstance(t, VMIdent):
|
||||
if t.name in keywords:
|
||||
raise ParseException("Restricted keyword: %s" % (t.name,))
|
||||
elif isinstance(t, str):
|
||||
tstr = t.encode('ascii', 'ignore')
|
||||
if tstr in keywords:
|
||||
raise ParseException("Restricted keyword: %s" % (tstr,))
|
||||
elif isinstance(t, str):
|
||||
if t in keywords:
|
||||
raise ParseException("Restricted keyword: %s" % (t,))
|
||||
|
||||
disallow_keywords.keywords = set('if,elseif,else,endif,try,except,finally,endtry,while,endwhile,continue,break,for,foreach,endfor'.split(','))
|
||||
|
||||
|
||||
class ObjRef(object):
|
||||
def __init__(self, objnum):
|
||||
self.objnum = objnum
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.objnum == other.objnum
|
||||
|
||||
|
||||
|
||||
|
||||
class VMType(VMBaseObject):
|
||||
def bytecode(self):
|
||||
return [StackLiteral(self)]
|
||||
|
||||
class VMInteger(VMType):
|
||||
def __init__(self, value):
|
||||
VMType.__init__(self)
|
||||
self.value = int(value)
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
return StackLiteral(VMInteger(tokens[0]))
|
||||
|
||||
def __repr__(self):
|
||||
return "%s" % (self.value,)
|
||||
|
||||
class VMFloat(VMType):
|
||||
def __init__(self, value):
|
||||
VMType.__init__(self)
|
||||
self.value = float(value)
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
return StackLiteral(VMFloat(tokens[0]))
|
||||
|
||||
def __repr__(self):
|
||||
return "%s" % (self.value,)
|
||||
|
||||
|
||||
class VMTable(VMType):
|
||||
def __init__(self, value):
|
||||
VMType.__init__(self)
|
||||
self.value = dict(value)
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
return StackLiteral(VMTable(tokens[0]))
|
||||
|
||||
class VMList(VMType):
|
||||
def __init__(self, value):
|
||||
VMType.__init__(self)
|
||||
self.value = list(value)
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
return StackLiteral(VMList())
|
||||
|
||||
class VMTablePair(VMType):
|
||||
def __init__(self, value):
|
||||
VMType.__init__(self)
|
||||
self.key = key
|
||||
self.value = value
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
return StackLiteral(VMList())
|
||||
|
||||
class VMString(VMType):
|
||||
def __init__(self, value):
|
||||
VMType.__init__(self)
|
||||
if isinstance(value, str):
|
||||
self.value = value
|
||||
else:
|
||||
self.value = str(value, 'ascii', 'ignore')
|
||||
|
||||
def __repr__(self):
|
||||
return "\"%s\"" % (repr(self.value)[1:].strip("'").replace("\\'", "'").replace('"', '\\"'),)
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
return StackLiteral(VMString(tokens[0]))
|
||||
|
||||
class VMObjRef(VMType):
|
||||
def __init__(self, value):
|
||||
VMType.__init__(self)
|
||||
if isinstance(value, ObjRef):
|
||||
self.value = value
|
||||
elif isinstance(value, (float, int)):
|
||||
self.value = int(value)
|
||||
else:
|
||||
raise TypeError("Attempted to create VMObjRef with invalid object reference: %r" % (value,))
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
return StackLiteral(VMObjRef(int(tokens[1])))
|
||||
|
||||
def __repr__(self):
|
||||
return "#%s" % (self.value,)
|
||||
|
||||
class VMRef(VMBaseObject):
|
||||
pass
|
||||
|
||||
class VMIdent(VMRef):
|
||||
def __init__(self, name):
|
||||
VMRef.__init__(self)
|
||||
self.name = name
|
||||
|
||||
def bytecode(self):
|
||||
return [StackLiteral(str(self.name))]
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
disallow_keywords(tokens)
|
||||
return VMIdent(tokens[0])
|
||||
|
||||
def __repr__(self):
|
||||
return "<ident %s>" % (self.name,)
|
||||
|
||||
|
||||
class VMVariable(VMRef):
|
||||
def __init__(self, name):
|
||||
VMRef.__init__(self)
|
||||
self.name = name
|
||||
|
||||
def ref(self):
|
||||
return [StackLiteral(str(self.name))]
|
||||
|
||||
def bytecode(self):
|
||||
return codejoin(self.ref(), GetVariable())
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
disallow_keywords(tokens)
|
||||
return VMVariable(tokens[0])
|
||||
|
||||
def __repr__(self):
|
||||
return "<variable %s>" % (self.name,)
|
||||
|
||||
class VMFileRef(VMRef):
|
||||
def __init__(self, obj, name):
|
||||
VMRef.__init__(self)
|
||||
self.obj = obj
|
||||
self.name = name
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
assert tokens[1] == "!"
|
||||
return VMFileRef(tokens[0], tokens[2])
|
||||
#return codejoin(tokens[0], StackLiteral(tokens[2]), GetProperty())
|
||||
|
||||
def ref(self):
|
||||
return [self.obj, self.name]
|
||||
|
||||
def __repr__(self):
|
||||
return "<fileref %s!%s>" % (self.obj, self.name)
|
||||
|
||||
def bytecode(self):
|
||||
return codejoin(self.ref(), GetFile())
|
||||
|
||||
class VMPropRef(VMRef):
|
||||
def __init__(self, obj, prop):
|
||||
VMRef.__init__(self)
|
||||
self.obj = obj
|
||||
self.prop = prop
|
||||
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
assert tokens[1] == "."
|
||||
return VMPropRef(tokens[0], tokens[2])
|
||||
#return codejoin(tokens[0], StackLiteral(tokens[2]), GetProperty())
|
||||
|
||||
def ref(self):
|
||||
return [self.obj, self.prop]
|
||||
|
||||
def __repr__(self):
|
||||
return "<propref %s.%s>" % (self.obj, self.prop)
|
||||
|
||||
def bytecode(self):
|
||||
return codejoin(self.ref(), GetProperty())
|
||||
|
||||
class VMCoreRef(VMPropRef):
|
||||
@staticmethod
|
||||
@tokenparser
|
||||
def parse(tokens):
|
||||
return VMPropRef(VMObjRef(0), tokens[1])
|
||||
|
||||
|
|
|
@ -30,7 +30,7 @@ def bytecode_flatten(l, ltypes=(list, tuple, ParseResults)):
|
|||
if broken:
|
||||
continue
|
||||
assert not isinstance(l[i], ltypes)
|
||||
assert not isinstance(l[i], (str, unicode, dict))
|
||||
assert not isinstance(l[i], (str, bytes, dict))
|
||||
bc = l[i].bytecode()
|
||||
if len(bc) > 1 or bc[0] != l[i]:
|
||||
l[i:i+1] = bc
|
||||
|
@ -51,4 +51,4 @@ def optimize(data):
|
|||
print("Missing position on element %s" % (x,))
|
||||
|
||||
return data
|
||||
|
||||
|
||||
|
|
268
pbkdf2.py
268
pbkdf2.py
|
@ -1,134 +1,134 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pbkdf2
|
||||
~~~~~~
|
||||
|
||||
This module implements pbkdf2 for Python. It also has some basic
|
||||
tests that ensure that it works. The implementation is straightforward
|
||||
and uses stdlib only stuff and can be easily be copy/pasted into
|
||||
your favourite application.
|
||||
|
||||
Use this as replacement for bcrypt that does not need a c implementation
|
||||
of a modified blowfish crypto algo.
|
||||
|
||||
Example usage:
|
||||
|
||||
>>> pbkdf2_hex('what i want to hash', 'the random salt')
|
||||
'fa7cc8a2b0a932f8e6ea42f9787e9d36e592e0c222ada6a9'
|
||||
|
||||
How to use this:
|
||||
|
||||
1. Use a constant time string compare function to compare the stored hash
|
||||
with the one you're generating::
|
||||
|
||||
def safe_str_cmp(a, b):
|
||||
if len(a) != len(b):
|
||||
return False
|
||||
rv = 0
|
||||
for x, y in izip(a, b):
|
||||
rv |= ord(x) ^ ord(y)
|
||||
return rv == 0
|
||||
|
||||
2. Use `os.urandom` to generate a proper salt of at least 8 byte.
|
||||
Use a unique salt per hashed password.
|
||||
|
||||
3. Store ``algorithm$salt:costfactor$hash`` in the database so that
|
||||
you can upgrade later easily to a different algorithm if you need
|
||||
one. For instance ``PBKDF2-256$thesalt:10000$deadbeef...``.
|
||||
|
||||
|
||||
:copyright: (c) Copyright 2011 by Armin Ronacher.
|
||||
:license: BSD, see LICENSE for more details.
|
||||
"""
|
||||
import hmac
|
||||
import hashlib
|
||||
from struct import Struct
|
||||
from operator import xor
|
||||
from itertools import starmap
|
||||
import binascii
|
||||
|
||||
|
||||
_pack_int = Struct('>I').pack
|
||||
|
||||
|
||||
def pbkdf2_hex(data, salt, iterations=1000, keylen=24, hashfunc=None):
    """Like :func:`pbkdf2_bin` but returns a hex encoded string."""
    raw = pbkdf2_bin(data, salt, iterations, keylen, hashfunc)
    return binascii.hexlify(raw).decode('ascii')
|
||||
|
||||
|
||||
def pbkdf2_bin(data, salt, iterations=1000, keylen=24, hashfunc=None):
    """Returns a binary digest for the PBKDF2 hash algorithm of `data`
    with the given `salt`. It iterates `iterations` time and produces a
    key of `keylen` bytes. By default SHA-256 is used as hash function,
    a different hashlib `hashfunc` can be provided.

    `data` and `salt` must be bytes.
    """
    hashfunc = hashfunc or hashlib.sha256
    mac = hmac.new(data, None, hashfunc)
    # Big-endian 32-bit block counter, as required by RFC 2898.
    pack_int = Struct('>I').pack

    def _pseudorandom(x, mac=mac):
        # HMAC(data, x) computed from a copy so `mac` stays reusable.
        h = mac.copy()
        h.update(x)
        return h.digest()

    buf = bytearray()
    # ceil(keylen / digest_size) blocks, numbered from 1.
    for block in range(1, -(-keylen // mac.digest_size) + 1):
        u = _pseudorandom(salt + pack_int(block))
        rv = bytearray(u)
        for _ in range(iterations - 1):
            # Bug fix: the old code chained lazy starmap/zip iterators one
            # per iteration and consumed them only at the end, giving an
            # `iterations`-deep iterator stack (quadratic work per block).
            # XOR eagerly into the accumulator instead; `u` is already
            # bytes, so no per-byte re-packing round-trip is needed.
            u = _pseudorandom(u)
            for j, b in enumerate(u):
                rv[j] ^= b
        buf += rv
    return bytes(buf[:keylen])
|
||||
|
||||
|
||||
def test():
    """Run the PBKDF2 self-test vectors (RFC 6070 and Crypt-PBKDF2).

    Prints a diagnostic for each mismatching vector and exits the
    process with status 1 if anything failed, 0 otherwise.
    """
    failed = []
    def check(data, salt, iterations, keylen, expected):
        # The published vectors are SHA-1 based, so force hashlib.sha1.
        rv = pbkdf2_hex(bytes(data, "utf-8"), bytes(salt, "utf-8"), iterations, keylen, hashlib.sha1)
        if rv != expected:
            print('Test failed:')
            print(' Expected: %s' % expected)
            print(' Got: %s' % rv)
            print(' Parameters:')
            print(' data=%s' % data)
            print(' salt=%s' % salt)
            print(' iterations=%d' % iterations)
            # Bug fix: a bare ``print`` is a no-op expression in Python 3;
            # call it so the intended blank separator line is emitted.
            print()
            failed.append(1)

    # From RFC 6070
    check('password', 'salt', 1, 20,
          '0c60c80f961f0e71f3a9b524af6012062fe037a6')
    check('password', 'salt', 2, 20,
          'ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957')
    check('password', 'salt', 4096, 20,
          '4b007901b765489abead49d926f721d065a429c1')
    check('passwordPASSWORDpassword', 'saltSALTsaltSALTsaltSALTsaltSALTsalt',
          4096, 25, '3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038')
    check('pass\x00word', 'sa\x00lt', 4096, 16,
          '56fa6aa75548099dcc37d7f03425e0c3')
    # This one is from the RFC but it just takes for ages
    ##check('password', 'salt', 16777216, 20,
    ##      'eefe3d61cd4da4e4e9945b3d6ba2158c2634e984')

    # From Crypt-PBKDF2
    check('password', 'ATHENA.MIT.EDUraeburn', 1, 16,
          'cdedb5281bb2f801565a1122b2563515')
    check('password', 'ATHENA.MIT.EDUraeburn', 1, 32,
          'cdedb5281bb2f801565a1122b25635150ad1f7a04bb9f3a333ecc0e2e1f70837')
    check('password', 'ATHENA.MIT.EDUraeburn', 2, 16,
          '01dbee7f4a9e243e988b62c73cda935d')
    check('password', 'ATHENA.MIT.EDUraeburn', 2, 32,
          '01dbee7f4a9e243e988b62c73cda935da05378b93244ec8f48a99e61ad799d86')
    check('password', 'ATHENA.MIT.EDUraeburn', 1200, 32,
          '5c08eb61fdf71e4e4ec3cf6ba1f5512ba7e52ddbc5e5142f708a31e2e62b1e13')
    check('X' * 64, 'pass phrase equals block size', 1200, 32,
          '139c30c0966bc32ba55fdbf212530ac9c5ec59f1a452f5cc9ad940fea0598ed1')
    check('X' * 65, 'pass phrase exceeds block size', 1200, 32,
          '9ccad6d468770cd51b10e6a68721be611a8b4d282601db3b36be9246915ec82a')

    raise SystemExit(bool(failed))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
test()
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
pbkdf2
|
||||
~~~~~~
|
||||
|
||||
This module implements pbkdf2 for Python. It also has some basic
|
||||
tests that ensure that it works. The implementation is straightforward
|
||||
and uses stdlib only stuff and can be easily be copy/pasted into
|
||||
your favourite application.
|
||||
|
||||
Use this as replacement for bcrypt that does not need a c implementation
|
||||
of a modified blowfish crypto algo.
|
||||
|
||||
Example usage:
|
||||
|
||||
>>> pbkdf2_hex('what i want to hash', 'the random salt')
|
||||
'fa7cc8a2b0a932f8e6ea42f9787e9d36e592e0c222ada6a9'
|
||||
|
||||
How to use this:
|
||||
|
||||
1. Use a constant time string compare function to compare the stored hash
|
||||
with the one you're generating::
|
||||
|
||||
def safe_str_cmp(a, b):
|
||||
if len(a) != len(b):
|
||||
return False
|
||||
rv = 0
|
||||
for x, y in izip(a, b):
|
||||
rv |= ord(x) ^ ord(y)
|
||||
return rv == 0
|
||||
|
||||
2. Use `os.urandom` to generate a proper salt of at least 8 byte.
|
||||
Use a unique salt per hashed password.
|
||||
|
||||
3. Store ``algorithm$salt:costfactor$hash`` in the database so that
|
||||
you can upgrade later easily to a different algorithm if you need
|
||||
one. For instance ``PBKDF2-256$thesalt:10000$deadbeef...``.
|
||||
|
||||
|
||||
:copyright: (c) Copyright 2011 by Armin Ronacher.
|
||||
:license: BSD, see LICENSE for more details.
|
||||
"""
|
||||
import hmac
|
||||
import hashlib
|
||||
from struct import Struct
|
||||
from operator import xor
|
||||
from itertools import starmap
|
||||
import binascii
|
||||
|
||||
|
||||
_pack_int = Struct('>I').pack
|
||||
|
||||
|
||||
def pbkdf2_hex(data, salt, iterations=1000, keylen=24, hashfunc=None):
|
||||
"""Like :func:`pbkdf2_bin` but returns a hex encoded string."""
|
||||
return str(binascii.hexlify(pbkdf2_bin(data, salt, iterations, keylen, hashfunc)), 'ascii')
|
||||
|
||||
|
||||
def pbkdf2_bin(data, salt, iterations=1000, keylen=24, hashfunc=None):
|
||||
"""Returns a binary digest for the PBKDF2 hash algorithm of `data`
|
||||
with the given `salt`. It iterates `iterations` time and produces a
|
||||
key of `keylen` bytes. By default SHA-256 is used as hash function,
|
||||
a different hashlib `hashfunc` can be provided.
|
||||
"""
|
||||
|
||||
bchr = lambda v: bytes((v,))
|
||||
|
||||
hashfunc = hashfunc or hashlib.sha256
|
||||
mac = hmac.new(data, None, hashfunc)
|
||||
def _pseudorandom(x, mac=mac):
|
||||
h = mac.copy()
|
||||
h.update(x)
|
||||
return h.digest()
|
||||
buf = []
|
||||
for block in range(1, -(-keylen // mac.digest_size) + 1):
|
||||
rv = u = _pseudorandom(salt + _pack_int(block))
|
||||
for i in range(iterations - 1):
|
||||
u = _pseudorandom(b''.join(map(bchr, u)))
|
||||
rv = starmap(xor, zip(rv, u))
|
||||
buf.extend(rv)
|
||||
return b''.join(map(bchr, buf))[:keylen]
|
||||
|
||||
|
||||
def test():
|
||||
failed = []
|
||||
def check(data, salt, iterations, keylen, expected):
|
||||
rv = pbkdf2_hex(bytes(data, "utf-8"), bytes(salt, "utf-8"), iterations, keylen, hashlib.sha1)
|
||||
if rv != expected:
|
||||
print('Test failed:')
|
||||
print(' Expected: %s' % expected)
|
||||
print(' Got: %s' % rv)
|
||||
print(' Parameters:')
|
||||
print(' data=%s' % data)
|
||||
print(' salt=%s' % salt)
|
||||
print(' iterations=%d' % iterations)
|
||||
print
|
||||
failed.append(1)
|
||||
|
||||
# From RFC 6070
|
||||
check('password', 'salt', 1, 20,
|
||||
'0c60c80f961f0e71f3a9b524af6012062fe037a6')
|
||||
check('password', 'salt', 2, 20,
|
||||
'ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957')
|
||||
check('password', 'salt', 4096, 20,
|
||||
'4b007901b765489abead49d926f721d065a429c1')
|
||||
check('passwordPASSWORDpassword', 'saltSALTsaltSALTsaltSALTsaltSALTsalt',
|
||||
4096, 25, '3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038')
|
||||
check('pass\x00word', 'sa\x00lt', 4096, 16,
|
||||
'56fa6aa75548099dcc37d7f03425e0c3')
|
||||
# This one is from the RFC but it just takes for ages
|
||||
##check('password', 'salt', 16777216, 20,
|
||||
## 'eefe3d61cd4da4e4e9945b3d6ba2158c2634e984')
|
||||
|
||||
# From Crypt-PBKDF2
|
||||
check('password', 'ATHENA.MIT.EDUraeburn', 1, 16,
|
||||
'cdedb5281bb2f801565a1122b2563515')
|
||||
check('password', 'ATHENA.MIT.EDUraeburn', 1, 32,
|
||||
'cdedb5281bb2f801565a1122b25635150ad1f7a04bb9f3a333ecc0e2e1f70837')
|
||||
check('password', 'ATHENA.MIT.EDUraeburn', 2, 16,
|
||||
'01dbee7f4a9e243e988b62c73cda935d')
|
||||
check('password', 'ATHENA.MIT.EDUraeburn', 2, 32,
|
||||
'01dbee7f4a9e243e988b62c73cda935da05378b93244ec8f48a99e61ad799d86')
|
||||
check('password', 'ATHENA.MIT.EDUraeburn', 1200, 32,
|
||||
'5c08eb61fdf71e4e4ec3cf6ba1f5512ba7e52ddbc5e5142f708a31e2e62b1e13')
|
||||
check('X' * 64, 'pass phrase equals block size', 1200, 32,
|
||||
'139c30c0966bc32ba55fdbf212530ac9c5ec59f1a452f5cc9ad940fea0598ed1')
|
||||
check('X' * 65, 'pass phrase exceeds block size', 1200, 32,
|
||||
'9ccad6d468770cd51b10e6a68721be611a8b4d282601db3b36be9246915ec82a')
|
||||
|
||||
raise SystemExit(bool(failed))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
test()
|
||||
|
|
32
pyenum.py
32
pyenum.py
|
@ -1,17 +1,17 @@
|
|||
class pyenum(object):
|
||||
def __setattr__(self, name, val):
|
||||
global enum_reversals
|
||||
|
||||
object.__setattr__(self, name, val)
|
||||
if not self in enum_reversals:
|
||||
enum_reversals[self] = {}
|
||||
enum_reversals[self][val] = name
|
||||
|
||||
enum_reversals = {}
|
||||
|
||||
def reverse_enum(e, v):
|
||||
global enum_reversals
|
||||
|
||||
if e in enum_reversals:
|
||||
return enum_reversals[e][v]
|
||||
class pyenum(object):
    """Attribute bag for enum-style constants.

    Every attribute assignment is additionally recorded in the
    module-level ``enum_reversals`` table so values can be mapped
    back to their names via ``reverse_enum``.
    """

    def __setattr__(self, name, val):
        object.__setattr__(self, name, val)
        # Register the value -> name reverse mapping for this enum.
        reversal = enum_reversals.setdefault(self, {})
        reversal[val] = name
|
||||
|
||||
enum_reversals = {}
|
||||
|
||||
def reverse_enum(e, v):
    """Map value *v* back to its attribute name on enum *e*.

    Returns None when *e* was never registered; raises KeyError when
    *e* is known but *v* is not one of its values (unchanged from the
    original behaviour).
    """
    mapping = enum_reversals.get(e)
    if mapping is None:
        return None
    return mapping[v]
|
|
@ -111,11 +111,11 @@ class VirtualMachine(object):
|
|||
self.task.exc_stack.append(exc)
|
||||
|
||||
def pop(self, count=1):
|
||||
stack = [uncoerce(self.task.stack.pop()) for x in xrange(count)]
|
||||
stack = [uncoerce(self.task.stack.pop()) for x in range(count)]
|
||||
return [x for x in reversed(stack)]
|
||||
|
||||
def pop_raw(self, count=1):
|
||||
stack = [self.task.stack.pop() for x in xrange(count)]
|
||||
stack = [self.task.stack.pop() for x in range(count)]
|
||||
return [x for x in reversed(stack)]
|
||||
|
||||
def push(self, value):
|
||||
|
@ -251,4 +251,4 @@ class VirtualMachine(object):
|
|||
static_vm = VirtualMachine(None)
|
||||
|
||||
if __name__ == "__main__":
|
||||
static_vm.test()
|
||||
static_vm.test()
|
||||
|
|
Loading…
Add table
Reference in a new issue