Commit 1c76b784 authored by Igor Dejanovic

Various changes to the textx and pyflies languages.

parent ab924aea
@@ -13,7 +13,7 @@
 from collections import namedtuple
 from arpeggio import StrMatch, Optional, ZeroOrMore, OneOrMore, Sequence,\
-    OrderedChoice, RegExMatch, EOF,\
+    OrderedChoice, RegExMatch, NoMatch, EOF,\
     SemanticAction,ParserPython, Combine, Parser, SemanticActionSingleChild,\
     SemanticActionBodyWithBraces
 from arpeggio.export import PMDOTExporter, PTDOTExporter
@@ -42,7 +42,7 @@ def repeat_operator(): return ['*', '?', '+']
 def assignment(): return attribute, assignment_op, assignment_rhs
 def attribute(): return ident
 def assignment_op(): return ["=", "*=", "+=", "?="]
-def assignment_rhs(): return [rule_ref, list_match, terminal_match]
+def assignment_rhs(): return [rule_ref, list_match, terminal_match, bracketed_choice]
 
 # Match
 def match(): return [terminal_match, list_match, rule_ref]
@@ -57,8 +57,8 @@ def list_separator(): return terminal_match
 
 # Rule reference
 def rule_ref(): return [rule_match, rule_link]
 def rule_match(): return ident
-def rule_link(): return '[', rule_choice, ']'
-def rule_choice(): return rule_name, ZeroOrMore('|', rule_name)
+def rule_link(): return '[', rule_name, ']'
+#def rule_choice(): return rule_name, ZeroOrMore('|', rule_name)
 def rule_name(): return ident
 def ident(): return _(r'\w+')
@@ -70,6 +70,11 @@ def comment_line(): return _(r'//.*$')
 def comment_block(): return _(r'/\*(.|\n)*?\*/')
 
+# Special rules - primitive types
+ID = _(r'[^\d\W]\w*\b', rule='ID', root=True)
+INT = _(r'[-+]?[0-9]+', rule='INT', root=True)
+FLOAT = _(r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', 'FLOAT', root=True)
+STRING = _(r'("[^"]*")|(\'[^\']*\')', 'STRING', root=True)
 
 class RuleMatchCrossRef(object):
     """Helper class used for cross reference resolving."""
@@ -78,7 +83,16 @@ class RuleMatchCrossRef(object):
         self.position = position
 
 
-class TextXSemanticError(Exception):
+# TextX Exceptions
+class TextXError(Exception):
+    pass
+
+
+class TextXSemanticError(TextXError):
+    pass
+
+
+class TextXSyntaxError(TextXError):
     pass
@@ -94,11 +108,18 @@ class TextXModelSA(SemanticAction):
             """
             def __init__(self, *args, **kwargs):
                 super(TextXLanguageParser, self).__init__(*args, **kwargs)
-                self.parser_model = Sequence(nodes=children[:], rule='model', root=True)
+
+                # By default first rule is starting rule
+                self.parser_model = children[0]
                 self.comments_model = parser._peg_rules.get('__comment', None)
 
+                self.debug = parser.debug
+
             def _parse(self):
+                try:
                     return self.parser_model.parse(self)
+                except NoMatch as e:
+                    raise TextXSyntaxError(str(e))
 
         textx_parser = TextXLanguageParser()
@@ -152,10 +173,13 @@ class TextXModelSA(SemanticAction):
         return textx_parser
 
+textx_model.sem = TextXModelSA()
 
-class MetaClassSA(SemanticAction):
-    def first_pass(self, parser, node, children):
+
+def metaclass_SA(parser, node, children):
     rule_name, rule = children
+    rule.rule = rule_name
+    rule.root = True
 
     # Do some name mangling for comment rule
     # to prevent refererencing from other rules
@@ -164,10 +188,9 @@ class MetaClassSA(SemanticAction):
     parser._peg_rules[rule_name] = rule
 
     return rule
+metaclass.sem = metaclass_SA
 
 
-class MetaClassNameSA(SemanticAction):
-    def first_pass(self, parser, node, children):
+def metaclass_name_SA(parser, node, children):
 
     class Meta(object):
         """Dynamic metaclass."""
         pass
@@ -183,26 +206,32 @@ class MetaClassNameSA(SemanticAction):
     parser.root_rule_name = name
 
     return name
+metaclass_name.sem = metaclass_name_SA
 
 
-class SequenceSA(SemanticAction):
-    def first_pass(self, parser, node, children):
+def sequence_SA(parser, node, children):
     return Sequence(nodes=children[:])
+sequence.sem = sequence_SA
 
 
-class ChoiceSA(SemanticAction):
-    def first_pass(self, parser, node, children):
+def choice_SA(parser, node, children):
     return OrderedChoice(nodes=children[:])
+choice.sem = choice_SA
 
 
-class AssignmentSA(SemanticAction):
-    def first_pass(self, parser, node, children):
+def assignment_SA(parser, node, children):
     #TODO: Register assignment on metaclass
+    # Implement semantic for addition
+    rhs = children[2]
+    op = children[1]
+    if op == '+=':
+        return OneOrMore(nodes=[rhs])
+    elif op == '*=':
+        return ZeroOrMore(nodes=[rhs])
+    elif op == '?=':
+        return Optional(nodes=[rhs])
+    else:
         return children[2]
+assignment.sem = assignment_SA
 
 
-class ExprSA(SemanticAction):
-    def first_pass(self, parser, node, children):
+def expr_SA(parser, node, children):
     if children[1] == '?':
         return Optional(nodes=[children[0]])
     elif children[1] == '*':
@@ -212,38 +241,45 @@ class ExprSA(SemanticAction):
     else:
         TextXSemanticError('Unknown repetition operand "{}" at {}'\
                 .format(children[1], str(parser.pos_to_linecol(node[1].position))))
+expr.sem = expr_SA
 
 
-class StrMatchSA(SemanticAction):
-    def first_pass(self, parser, node, children):
+def str_match_SA(parser, node, children):
     return StrMatch(children[0], ignore_case=parser.ignore_case)
+str_match.sem = str_match_SA
 
 
-class REMatchSA(SemanticAction):
-    def first_pass(self, parser, node, children):
+def re_match_SA(parser, node, children):
     to_match = children[0]
+    print("TOMATCH:", to_match)
     regex = RegExMatch(to_match, ignore_case=parser.ignore_case)
+    try:
         regex.compile()
+    except Exception as e:
+        raise TextXSyntaxError("{} at {}".format(str(e),\
+            str(parser.pos_to_linecol(node[1].position))))
     return regex
+re_match.sem = re_match_SA
 
 
-class RuleMatchSA(SemanticAction):
-    def first_pass(self, parser, node, children):
+def rule_match_SA(parser, node, children):
     return RuleMatchCrossRef(str(node), node.position)
+rule_match.sem = rule_match_SA
 
 
+def rule_link_SA(parser, node, children):
+    # TODO: In analisys during model parsing this will be a link to some other object
+    # identified by target metaclass ID
+    return ID
+rule_link.sem = rule_link_SA
 
 
+def list_match_SA(parser, node, children):
+    if len(children)==1:
+        return children[0]
+    else:
+        match = children[0]
+        separator = children[1]
+        return Sequence(nodes=[children[0],
+                    ZeroOrMore(nodes=Sequence(nodes=[separator, match]))])
+list_match.sem = list_match_SA
 
 
-textx_model.sem = TextXModelSA()
-metaclass.sem = MetaClassSA()
-metaclass_name.sem = MetaClassNameSA()
-sequence.sem = SequenceSA()
-choice.sem = ChoiceSA()
+# Default actions
 bracketed_choice.sem = SemanticActionSingleChild()
-expr.sem = ExprSA()
-str_match.sem = StrMatchSA()
-re_match.sem = REMatchSA()
-rule_match.sem = RuleMatchSA()
 
 
 def get_parser(language_def, ignore_case=True, debug=False):
@@ -254,11 +290,10 @@ def get_parser(language_def, ignore_case=True, debug=False):
     # This is used during parser construction phase.
     parser._metaclasses = {}
     parser._peg_rules = {
-        # Special rules - primitive types
-        'ID': _(r'[^\d\W]\w*\b'),
-        'INT': _(r'[-+]?[0-9]+'),
-        'FLOAT': _(r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?'),
-        'STRING': _(r'("[^"]*")|(\'[^\']*\')')
+        'ID': ID,
+        'INT': INT,
+        'FLOAT': FLOAT,
+        'STRING': STRING,
         }
     for regex in parser._peg_rules.values():
         regex.compile()
@@ -266,7 +301,10 @@ def get_parser(language_def, ignore_case=True, debug=False):
     parser.root_rule_name = None
 
     # Parse language description with TextX parser
+    try:
         parse_tree = parser.parse(language_def)
+    except NoMatch as e:
+        raise TextXSyntaxError(str(e))
 
     # Construct new parser based on the given language description.
     # This parser will have semantic actions in place to create
......
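The primitive-type rules introduced above (ID, INT, FLOAT, STRING) are ordinary regular expressions, so their behaviour can be sanity-checked with Python's re module alone. A minimal sketch using the exact patterns from the diff; the sample inputs are illustrative and not part of the commit:

import re

ID     = re.compile(r'[^\d\W]\w*\b')
INT    = re.compile(r'[-+]?[0-9]+')
FLOAT  = re.compile(r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?')
STRING = re.compile(r'("[^"]*")|(\'[^\']*\')')

# Each built-in type accepts the kind of token a grammar would reference it for.
assert ID.match('full_name')
assert INT.match('-42')
assert FLOAT.match('3.14e-2')
assert STRING.match('"First and last name"')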
@@ -49,7 +49,7 @@ experiment {
   # Variable_name, type, optional label
   # Type can be: str, int, float or enumerated list
   subject {
-    full_name str "First and last name"
+    full_name string "First and last name"
     age int
     gender [Male, Female] "Gender"
   }
......
 /*
-    This is a TextX definition of pyFlies DSL for Reaction Time test
+    This is a TextX specification of pyFlies DSL for Reaction Time test
     experiments definition.
     Author: Igor R. Dejanovic <igor DOT dejanovic AT gmail DOT com>
     Copyright: (c) 2014 Igor R. Dejanovic <igor DOT dejanovic AT gmail DOT com>
@@ -21,7 +21,7 @@ TestType:
 ;
 
 TestParam:
-    name=ID TestParamValue label=TestParamLabel?
+    name=ID TestParamValue
 ;
 
 TestParamValue:
@@ -34,7 +34,7 @@ TestParamLabel:
 
 Block:
-    Intro|Test|Sequence|Randomize
+    Intro|Test|Sequence|Randomize|Subject
 ;
 
 Experiment:
@@ -65,6 +65,20 @@ Intro:
     /====*/
 ;
 
+Subject:
+    'subject' '{'
+        attribute+=SubjectAttribute
+    '}'
+;
+
+SubjectAttribute:
+    name=ID type=("int"|"string"|"float"|Enum) label=STRING?
+;
+
+Enum:
+    '[' values+={ID ','} ']'
+;
+
 // Special rule for comments
 Comment:
     /#.*$/
......
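The {ID ','} construct in the new Enum rule is a separator list; per list_match_SA in the parser change above, it expands to a first match followed by zero or more separator/match pairs. A standalone sketch of that expansion written directly against Arpeggio, assuming Arpeggio is installed; the rule names below are illustrative and not part of the grammar:

from arpeggio import ParserPython, ZeroOrMore, EOF
from arpeggio import RegExMatch as _

def ident():   return _(r'[^\d\W]\w*\b')              # same regex as the ID primitive above
def id_list(): return ident, ZeroOrMore(',', ident)   # {ID ','} expanded by hand
def enum():    return '[', id_list, ']', EOF

parser = ParserPython(enum)
parser.parse('[Male, Female]')   # accepts the Gender values from the example model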