diff --git a/.gitignore b/.gitignore index 5d46fe1..4197c8c 100644 --- a/.gitignore +++ b/.gitignore @@ -127,4 +127,10 @@ dmypy.json .pyre/ .DS_Store -config.ini \ No newline at end of file + +*.ini + +.antlr +antlr*.jar + +*.code-workspace \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..25cf19b --- /dev/null +++ b/README.md @@ -0,0 +1,14 @@ +# Prerequisites + +- Install dependencies: + +```sh +python3 -m pip install -r packages.txt +``` + +# Neo4j client library + +The whole infrastructure to develop the client library is placed in the `db` folder. This folder contains a [README](db/README.md) file with more information. + +# Web + diff --git a/db/README.md b/db/README.md new file mode 100644 index 0000000..74065fb --- /dev/null +++ b/db/README.md @@ -0,0 +1,14 @@ +## Development + +- A running neo4j instance is required to run code during development. [Running the official docker image](https://neo4j.com/developer/docker/) is the easiest way to do that. + +- Create a configuration file `config.ini`. See [config.ini](config.ini) + +## Codestyle + +- Please style your code according to [PEP8](https://www.python.org/dev/peps/pep-0008/) rules + +## Regenerate ANTLR parser + +- [Download](https://www.antlr.org/download.html) the ANTLR generator. 
+- Run generation script `./scripts/generate_antlr.sh antlr-4.9.2-complete.jar` \ No newline at end of file diff --git a/db/sc/__init__.py b/db/sc/__init__.py index c1e63ed..bfa57e7 100644 --- a/db/sc/__init__.py +++ b/db/sc/__init__.py @@ -1 +1,2 @@ from sc.memory import Memory +from sc.scs import SCsParser diff --git a/db/sc/core/keynodes.py b/db/sc/core/keynodes.py index ad87904..30c912b 100644 --- a/db/sc/core/keynodes.py +++ b/db/sc/core/keynodes.py @@ -1,3 +1,39 @@ -class Keynodes: +class KeynodeNames: + SC_NODE = "sc_node" + SC_LINK = "sc_link" + SC_EDGE = "sc_edge" + SC_ARC = "sc_arc" + CORE_TYPES = [SC_NODE, SC_LINK, SC_EDGE, SC_ARC] + + SC_CONST = "sc_const" + SC_VAR = "sc_var" + + CONST_TYPES = [SC_CONST, SC_VAR] + + SC_NODE_STRUCT = "sc_node_struct" + SC_NODE_TUPLE = "sc_node_tuple" + SC_NODE_ROLE = "sc_node_role" + SC_NODE_NO_ROLE = "sc_node_no_role" + SC_NODE_CLASS = "sc_node_class" + SC_NODE_ABSTRACT = "sc_node_abstract" + SC_NODE_MATERIAL = "sc_node_material" + + NODE_TYPES = [ + SC_NODE_STRUCT, SC_NODE_TUPLE, SC_NODE_ROLE, + SC_NODE_NO_ROLE, SC_NODE_CLASS, SC_NODE_ABSTRACT, + SC_NODE_MATERIAL] + + SC_ARC_PERM = "sc_arc_perm" + SC_ARC_TEMP = "sc_arc_temp" + + ARC_PERM_TYPES = [SC_ARC_PERM, SC_ARC_TEMP] + + SC_ARC_POS = "sc_arc_pos" + SC_ARC_NEG = "sc_arc_neg" + SC_ARC_FUZ = "sc_arc_fuz" + + ARC_POS_TYPES = [SC_ARC_POS, SC_ARC_NEG, SC_ARC_FUZ] + + # common keynodes NREL_SYS_IDTF = "nrel_system_identifier" diff --git a/db/sc/core/transaction/names.py b/db/sc/core/transaction/names.py index fe82e43..74b48ac 100644 --- a/db/sc/core/transaction/names.py +++ b/db/sc/core/transaction/names.py @@ -1,5 +1,5 @@ from sc.core.element import Element -from sc.core.keynodes import Keynodes +from sc.core.keynodes import KeynodeNames from sc.core.keywords import Labels, TypeAttrs from sc.core.transaction.utils import _parse_output_element, _get_label_from_type @@ -32,7 +32,7 @@ class TransactionNamesWrite: def __init__(self, driver: neo4j.Driver, - nrel_sys_idtf: 
str = Keynodes.NREL_SYS_IDTF) -> None: + nrel_sys_idtf: str = KeynodeNames.NREL_SYS_IDTF) -> None: self._driver = driver self._sys_idtfs = set() self._tasks = [] @@ -58,7 +58,7 @@ def _is_empty(self) -> bool: return len(self._sys_idtfs) == 0 def _make_query(self) -> str: - query = (f"MATCH (l:{Labels.SC_LINK} {{content: '{Keynodes.NREL_SYS_IDTF}', {_const_attr()} }})" + query = (f"MATCH (l:{Labels.SC_LINK} {{content: '{KeynodeNames.NREL_SYS_IDTF}', {_const_attr()} }})" f"<-[__idtf_edge:{Labels.SC_ARC} {{ {TypeAttrs.CONST}: 'CONST' }}]" f"-(__sys_idtf:{Labels.SC_NODE}), \n" f"(:{Labels.SC_EDGE_SOCK} {{edge_id: id(__idtf_edge)}})" @@ -136,7 +136,7 @@ class TransactionNamesRead: def __init__(self, driver: neo4j.Driver, - nrel_sys_idtf: str = Keynodes.NREL_SYS_IDTF) -> None: + nrel_sys_idtf: str = KeynodeNames.NREL_SYS_IDTF) -> None: self._driver = driver self._sys_idtfs = set() @@ -161,7 +161,7 @@ def _is_empty(self) -> bool: def _make_query(self) -> str: - query = (f"MATCH (l:{Labels.SC_LINK} {{ content: '{Keynodes.NREL_SYS_IDTF}', {_const_attr()} }})" + query = (f"MATCH (l:{Labels.SC_LINK} {{ content: '{KeynodeNames.NREL_SYS_IDTF}', {_const_attr()} }})" f"<-[edge:{Labels.SC_ARC} {{ {_const_attr()} }}]" f"-(__sys_idtf:{Labels.SC_NODE}), \n" f"(edge_sock:{Labels.SC_EDGE_SOCK} {{edge_id: id(edge)}})" diff --git a/db/sc/memory.py b/db/sc/memory.py index bc599fb..339513d 100644 --- a/db/sc/memory.py +++ b/db/sc/memory.py @@ -3,6 +3,7 @@ from sc.core.transaction import TransactionWrite, TransactionRead, TransactionNamesWrite, TransactionNamesRead import neo4j +import logging class Memory: @@ -10,14 +11,14 @@ class Memory: def __init__(self, config_path: str): self._config = Config(config_path) - print(f"Connecting to {self._config.db_uri()}") + logging.info(f"Connecting to {self._config.db_uri()}") self._client = Client( self._config.db_uri(), self._config.db_user(), self._config.db_password()) def close(self): - print(f"Close connection to {self._config.db_uri()}") + 
logging.info(f"Close connection to {self._config.db_uri()}") self._client.close() @property diff --git a/db/sc/scs/__init__.py b/db/sc/scs/__init__.py new file mode 100644 index 0000000..1227a0d --- /dev/null +++ b/db/sc/scs/__init__.py @@ -0,0 +1 @@ +from .parser import SCsParser diff --git a/db/sc/scs/antlr/__init__.py b/db/sc/scs/antlr/__init__.py new file mode 100644 index 0000000..3ee6fc2 --- /dev/null +++ b/db/sc/scs/antlr/__init__.py @@ -0,0 +1,2 @@ +from .scsLexer import scsLexer as SCsLexerAntlr +from .scsParser import scsParser as SCsParserAntlr diff --git a/db/sc/scs/antlr/scs.interp b/db/sc/scs/antlr/scs.interp new file mode 100644 index 0000000..820edcc --- /dev/null +++ b/db/sc/scs/antlr/scs.interp @@ -0,0 +1,155 @@ +token literal names: +null +'_' +'<>' +'<=>' +'_<>' +'_<=>' +'>' +'<' +'=>' +'<=' +'_=>' +'_<=' +'..>' +'<..' +'->' +'<-' +'-|>' +'<|-' +'-/>' +'' +'<~' +'~|>' +'<|~' +'~/>' +'' +'_<..' +'_->' +'_<-' +'_-|>' +'_<|-' +'_-/>' +'_' +'_<~' +'_~|>' +'_<|~' +'_~/>' +'_'=2 +'<=>'=3 +'_<>'=4 +'_<=>'=5 +'>'=6 +'<'=7 +'=>'=8 +'<='=9 +'_=>'=10 +'_<='=11 +'..>'=12 +'<..'=13 +'->'=14 +'<-'=15 +'-|>'=16 +'<|-'=17 +'-/>'=18 +''=20 +'<~'=21 +'~|>'=22 +'<|~'=23 +'~/>'=24 +''=26 +'_<..'=27 +'_->'=28 +'_<-'=29 +'_-|>'=30 +'_<|-'=31 +'_-/>'=32 +'_'=34 +'_<~'=35 +'_~|>'=36 +'_<|~'=37 +'_~/>'=38 +'_' +'<=>' +'_<>' +'_<=>' +'>' +'<' +'=>' +'<=' +'_=>' +'_<=' +'..>' +'<..' +'->' +'<-' +'-|>' +'<|-' +'-/>' +'' +'<~' +'~|>' +'<|~' +'~/>' +'' +'_<..' 
+'_->' +'_<-' +'_-|>' +'_<|-' +'_-/>' +'_' +'_<~' +'_~|>' +'_<|~' +'_~/>' +'_ 5: + from typing import TextIO +else: + from typing.io import TextIO + + + +def serializedATN(): + with StringIO() as buf: + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2>") + buf.write("\u0194\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") + buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r") + buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23") + buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30") + buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36") + buf.write("\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%") + buf.write("\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.") + buf.write("\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64") + buf.write("\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:") + buf.write("\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\3\2\3\2\3\3\3\3\3") + buf.write("\3\3\4\3\4\3\4\3\4\3\5\3\5\3\5\3\5\3\6\3\6\3\6\3\6\3\6") + buf.write("\3\7\3\7\3\b\3\b\3\t\3\t\3\t\3\n\3\n\3\n\3\13\3\13\3\13") + buf.write("\3\13\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3") + buf.write("\16\3\17\3\17\3\17\3\20\3\20\3\20\3\21\3\21\3\21\3\21") + buf.write("\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\24\3\24\3\24") + buf.write("\3\24\3\25\3\25\3\25\3\26\3\26\3\26\3\27\3\27\3\27\3\27") + buf.write("\3\30\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\32\3\32\3\32") + buf.write("\3\32\3\33\3\33\3\33\3\33\3\33\3\34\3\34\3\34\3\34\3\34") + buf.write("\3\35\3\35\3\35\3\35\3\36\3\36\3\36\3\36\3\37\3\37\3\37") + buf.write("\3\37\3\37\3 \3 \3 \3 \3 \3!\3!\3!\3!\3!\3\"\3\"\3\"\3") + buf.write("\"\3\"\3#\3#\3#\3#\3$\3$\3$\3$\3%\3%\3%\3%\3%\3&\3&\3") + buf.write("&\3&\3&\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3(\3)\3)\3)\3") + buf.write(")\3*\3*\3+\3+\3,\3,\3-\3-\3.\3.\3/\3/\3\60\3\60\3\60\3") + buf.write("\61\3\61\3\61\3\62\6\62\u0133\n\62\r\62\16\62\u0134\3") + 
buf.write("\63\3\63\6\63\u0139\n\63\r\63\16\63\u013a\3\64\3\64\3") + buf.write("\64\3\65\3\65\5\65\u0142\n\65\3\66\3\66\5\66\u0146\n\66") + buf.write("\3\67\3\67\3\67\38\38\38\39\39\39\39\39\79\u0153\n9\f") + buf.write("9\169\u0156\139\39\39\59\u015a\n9\3:\3:\3:\3:\7:\u0160") + buf.write("\n:\f:\16:\u0163\13:\3:\3:\3;\3;\3;\5;\u016a\n;\3<\3<") + buf.write("\3<\3<\3=\3=\3=\3=\7=\u0174\n=\f=\16=\u0177\13=\3=\5=") + buf.write("\u017a\n=\3=\3=\3=\3=\3>\3>\3>\3>\7>\u0184\n>\f>\16>\u0187") + buf.write("\13>\3>\3>\3>\3>\3>\3?\3?\3?\3?\3@\3@\3@\3\u0185\2A\3") + buf.write("\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16") + buf.write("\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61") + buf.write("\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*") + buf.write("S+U,W-Y.[/]\60_\61a\62c\63e\64g\2i\2k\2m\65o\66q\67s8") + buf.write("u9w:y;{<}=\177>\3\2\n\7\2\60\60\62;C\\aac|\6\2\62;C\\") + buf.write("aac|\3\2]_\4\2,,]_\3\2$$\5\2\f\f\17\17\u202a\u202b\4\2") + buf.write("\f\f\17\17\5\2\13\f\17\17\"\"\2\u019c\2\3\3\2\2\2\2\5") + buf.write("\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2") + buf.write("\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2") + buf.write("\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2") + buf.write("\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2") + buf.write("\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61") + buf.write("\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2") + buf.write("\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3") + buf.write("\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2M") + buf.write("\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2\2\2U\3\2\2\2\2") + buf.write("W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2]\3\2\2\2\2_\3\2\2\2") + buf.write("\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2\2m\3\2\2\2\2o\3\2\2") + buf.write("\2\2q\3\2\2\2\2s\3\2\2\2\2u\3\2\2\2\2w\3\2\2\2\2y\3\2") + buf.write("\2\2\2{\3\2\2\2\2}\3\2\2\2\2\177\3\2\2\2\3\u0081\3\2\2") + 
buf.write("\2\5\u0083\3\2\2\2\7\u0086\3\2\2\2\t\u008a\3\2\2\2\13") + buf.write("\u008e\3\2\2\2\r\u0093\3\2\2\2\17\u0095\3\2\2\2\21\u0097") + buf.write("\3\2\2\2\23\u009a\3\2\2\2\25\u009d\3\2\2\2\27\u00a1\3") + buf.write("\2\2\2\31\u00a5\3\2\2\2\33\u00a9\3\2\2\2\35\u00ad\3\2") + buf.write("\2\2\37\u00b0\3\2\2\2!\u00b3\3\2\2\2#\u00b7\3\2\2\2%\u00bb") + buf.write("\3\2\2\2\'\u00bf\3\2\2\2)\u00c3\3\2\2\2+\u00c6\3\2\2\2") + buf.write("-\u00c9\3\2\2\2/\u00cd\3\2\2\2\61\u00d1\3\2\2\2\63\u00d5") + buf.write("\3\2\2\2\65\u00d9\3\2\2\2\67\u00de\3\2\2\29\u00e3\3\2") + buf.write("\2\2;\u00e7\3\2\2\2=\u00eb\3\2\2\2?\u00f0\3\2\2\2A\u00f5") + buf.write("\3\2\2\2C\u00fa\3\2\2\2E\u00ff\3\2\2\2G\u0103\3\2\2\2") + buf.write("I\u0107\3\2\2\2K\u010c\3\2\2\2M\u0111\3\2\2\2O\u0116\3") + buf.write("\2\2\2Q\u011b\3\2\2\2S\u011f\3\2\2\2U\u0121\3\2\2\2W\u0123") + buf.write("\3\2\2\2Y\u0125\3\2\2\2[\u0127\3\2\2\2]\u0129\3\2\2\2") + buf.write("_\u012b\3\2\2\2a\u012e\3\2\2\2c\u0132\3\2\2\2e\u0136\3") + buf.write("\2\2\2g\u013c\3\2\2\2i\u0141\3\2\2\2k\u0145\3\2\2\2m\u0147") + buf.write("\3\2\2\2o\u014a\3\2\2\2q\u0159\3\2\2\2s\u015b\3\2\2\2") + buf.write("u\u0169\3\2\2\2w\u016b\3\2\2\2y\u016f\3\2\2\2{\u017f\3") + buf.write("\2\2\2}\u018d\3\2\2\2\177\u0191\3\2\2\2\u0081\u0082\7") + buf.write("a\2\2\u0082\4\3\2\2\2\u0083\u0084\7>\2\2\u0084\u0085\7") + buf.write("@\2\2\u0085\6\3\2\2\2\u0086\u0087\7>\2\2\u0087\u0088\7") + buf.write("?\2\2\u0088\u0089\7@\2\2\u0089\b\3\2\2\2\u008a\u008b\7") + buf.write("a\2\2\u008b\u008c\7>\2\2\u008c\u008d\7@\2\2\u008d\n\3") + buf.write("\2\2\2\u008e\u008f\7a\2\2\u008f\u0090\7>\2\2\u0090\u0091") + buf.write("\7?\2\2\u0091\u0092\7@\2\2\u0092\f\3\2\2\2\u0093\u0094") + buf.write("\7@\2\2\u0094\16\3\2\2\2\u0095\u0096\7>\2\2\u0096\20\3") + buf.write("\2\2\2\u0097\u0098\7?\2\2\u0098\u0099\7@\2\2\u0099\22") + buf.write("\3\2\2\2\u009a\u009b\7>\2\2\u009b\u009c\7?\2\2\u009c\24") + buf.write("\3\2\2\2\u009d\u009e\7a\2\2\u009e\u009f\7?\2\2\u009f\u00a0") + 
buf.write("\7@\2\2\u00a0\26\3\2\2\2\u00a1\u00a2\7a\2\2\u00a2\u00a3") + buf.write("\7>\2\2\u00a3\u00a4\7?\2\2\u00a4\30\3\2\2\2\u00a5\u00a6") + buf.write("\7\60\2\2\u00a6\u00a7\7\60\2\2\u00a7\u00a8\7@\2\2\u00a8") + buf.write("\32\3\2\2\2\u00a9\u00aa\7>\2\2\u00aa\u00ab\7\60\2\2\u00ab") + buf.write("\u00ac\7\60\2\2\u00ac\34\3\2\2\2\u00ad\u00ae\7/\2\2\u00ae") + buf.write("\u00af\7@\2\2\u00af\36\3\2\2\2\u00b0\u00b1\7>\2\2\u00b1") + buf.write("\u00b2\7/\2\2\u00b2 \3\2\2\2\u00b3\u00b4\7/\2\2\u00b4") + buf.write("\u00b5\7~\2\2\u00b5\u00b6\7@\2\2\u00b6\"\3\2\2\2\u00b7") + buf.write("\u00b8\7>\2\2\u00b8\u00b9\7~\2\2\u00b9\u00ba\7/\2\2\u00ba") + buf.write("$\3\2\2\2\u00bb\u00bc\7/\2\2\u00bc\u00bd\7\61\2\2\u00bd") + buf.write("\u00be\7@\2\2\u00be&\3\2\2\2\u00bf\u00c0\7>\2\2\u00c0") + buf.write("\u00c1\7\61\2\2\u00c1\u00c2\7/\2\2\u00c2(\3\2\2\2\u00c3") + buf.write("\u00c4\7\u0080\2\2\u00c4\u00c5\7@\2\2\u00c5*\3\2\2\2\u00c6") + buf.write("\u00c7\7>\2\2\u00c7\u00c8\7\u0080\2\2\u00c8,\3\2\2\2\u00c9") + buf.write("\u00ca\7\u0080\2\2\u00ca\u00cb\7~\2\2\u00cb\u00cc\7@\2") + buf.write("\2\u00cc.\3\2\2\2\u00cd\u00ce\7>\2\2\u00ce\u00cf\7~\2") + buf.write("\2\u00cf\u00d0\7\u0080\2\2\u00d0\60\3\2\2\2\u00d1\u00d2") + buf.write("\7\u0080\2\2\u00d2\u00d3\7\61\2\2\u00d3\u00d4\7@\2\2\u00d4") + buf.write("\62\3\2\2\2\u00d5\u00d6\7>\2\2\u00d6\u00d7\7\61\2\2\u00d7") + buf.write("\u00d8\7\u0080\2\2\u00d8\64\3\2\2\2\u00d9\u00da\7a\2\2") + buf.write("\u00da\u00db\7\60\2\2\u00db\u00dc\7\60\2\2\u00dc\u00dd") + buf.write("\7@\2\2\u00dd\66\3\2\2\2\u00de\u00df\7a\2\2\u00df\u00e0") + buf.write("\7>\2\2\u00e0\u00e1\7\60\2\2\u00e1\u00e2\7\60\2\2\u00e2") + buf.write("8\3\2\2\2\u00e3\u00e4\7a\2\2\u00e4\u00e5\7/\2\2\u00e5") + buf.write("\u00e6\7@\2\2\u00e6:\3\2\2\2\u00e7\u00e8\7a\2\2\u00e8") + buf.write("\u00e9\7>\2\2\u00e9\u00ea\7/\2\2\u00ea<\3\2\2\2\u00eb") + buf.write("\u00ec\7a\2\2\u00ec\u00ed\7/\2\2\u00ed\u00ee\7~\2\2\u00ee") + buf.write("\u00ef\7@\2\2\u00ef>\3\2\2\2\u00f0\u00f1\7a\2\2\u00f1") + 
buf.write("\u00f2\7>\2\2\u00f2\u00f3\7~\2\2\u00f3\u00f4\7/\2\2\u00f4") + buf.write("@\3\2\2\2\u00f5\u00f6\7a\2\2\u00f6\u00f7\7/\2\2\u00f7") + buf.write("\u00f8\7\61\2\2\u00f8\u00f9\7@\2\2\u00f9B\3\2\2\2\u00fa") + buf.write("\u00fb\7a\2\2\u00fb\u00fc\7>\2\2\u00fc\u00fd\7\61\2\2") + buf.write("\u00fd\u00fe\7/\2\2\u00feD\3\2\2\2\u00ff\u0100\7a\2\2") + buf.write("\u0100\u0101\7\u0080\2\2\u0101\u0102\7@\2\2\u0102F\3\2") + buf.write("\2\2\u0103\u0104\7a\2\2\u0104\u0105\7>\2\2\u0105\u0106") + buf.write("\7\u0080\2\2\u0106H\3\2\2\2\u0107\u0108\7a\2\2\u0108\u0109") + buf.write("\7\u0080\2\2\u0109\u010a\7~\2\2\u010a\u010b\7@\2\2\u010b") + buf.write("J\3\2\2\2\u010c\u010d\7a\2\2\u010d\u010e\7>\2\2\u010e") + buf.write("\u010f\7~\2\2\u010f\u0110\7\u0080\2\2\u0110L\3\2\2\2\u0111") + buf.write("\u0112\7a\2\2\u0112\u0113\7\u0080\2\2\u0113\u0114\7\61") + buf.write("\2\2\u0114\u0115\7@\2\2\u0115N\3\2\2\2\u0116\u0117\7a") + buf.write("\2\2\u0117\u0118\7>\2\2\u0118\u0119\7\61\2\2\u0119\u011a") + buf.write("\7\u0080\2\2\u011aP\3\2\2\2\u011b\u011c\7\60\2\2\u011c") + buf.write("\u011d\7\60\2\2\u011d\u011e\7\60\2\2\u011eR\3\2\2\2\u011f") + buf.write("\u0120\7?\2\2\u0120T\3\2\2\2\u0121\u0122\7*\2\2\u0122") + buf.write("V\3\2\2\2\u0123\u0124\7+\2\2\u0124X\3\2\2\2\u0125\u0126") + buf.write("\7=\2\2\u0126Z\3\2\2\2\u0127\u0128\7}\2\2\u0128\\\3\2") + buf.write("\2\2\u0129\u012a\7\177\2\2\u012a^\3\2\2\2\u012b\u012c") + buf.write("\7*\2\2\u012c\u012d\7,\2\2\u012d`\3\2\2\2\u012e\u012f") + buf.write("\7,\2\2\u012f\u0130\7+\2\2\u0130b\3\2\2\2\u0131\u0133") + buf.write("\t\2\2\2\u0132\u0131\3\2\2\2\u0133\u0134\3\2\2\2\u0134") + buf.write("\u0132\3\2\2\2\u0134\u0135\3\2\2\2\u0135d\3\2\2\2\u0136") + buf.write("\u0138\7B\2\2\u0137\u0139\t\3\2\2\u0138\u0137\3\2\2\2") + buf.write("\u0139\u013a\3\2\2\2\u013a\u0138\3\2\2\2\u013a\u013b\3") + buf.write("\2\2\2\u013bf\3\2\2\2\u013c\u013d\7^\2\2\u013d\u013e\4") + buf.write("]_\2\u013eh\3\2\2\2\u013f\u0142\5g\64\2\u0140\u0142\n") + 
buf.write("\4\2\2\u0141\u013f\3\2\2\2\u0141\u0140\3\2\2\2\u0142j") + buf.write("\3\2\2\2\u0143\u0146\5g\64\2\u0144\u0146\n\5\2\2\u0145") + buf.write("\u0143\3\2\2\2\u0145\u0144\3\2\2\2\u0146l\3\2\2\2\u0147") + buf.write("\u0148\7]\2\2\u0148\u0149\7,\2\2\u0149n\3\2\2\2\u014a") + buf.write("\u014b\7,\2\2\u014b\u014c\7_\2\2\u014cp\3\2\2\2\u014d") + buf.write("\u014e\7]\2\2\u014e\u015a\7_\2\2\u014f\u0150\7]\2\2\u0150") + buf.write("\u0154\5k\66\2\u0151\u0153\5i\65\2\u0152\u0151\3\2\2\2") + buf.write("\u0153\u0156\3\2\2\2\u0154\u0152\3\2\2\2\u0154\u0155\3") + buf.write("\2\2\2\u0155\u0157\3\2\2\2\u0156\u0154\3\2\2\2\u0157\u0158") + buf.write("\7_\2\2\u0158\u015a\3\2\2\2\u0159\u014d\3\2\2\2\u0159") + buf.write("\u014f\3\2\2\2\u015ar\3\2\2\2\u015b\u0161\7$\2\2\u015c") + buf.write("\u0160\n\6\2\2\u015d\u015e\7^\2\2\u015e\u0160\7$\2\2\u015f") + buf.write("\u015c\3\2\2\2\u015f\u015d\3\2\2\2\u0160\u0163\3\2\2\2") + buf.write("\u0161\u015f\3\2\2\2\u0161\u0162\3\2\2\2\u0162\u0164\3") + buf.write("\2\2\2\u0163\u0161\3\2\2\2\u0164\u0165\7$\2\2\u0165t\3") + buf.write("\2\2\2\u0166\u016a\7<\2\2\u0167\u0168\7<\2\2\u0168\u016a") + buf.write("\7<\2\2\u0169\u0166\3\2\2\2\u0169\u0167\3\2\2\2\u016a") + buf.write("v\3\2\2\2\u016b\u016c\t\7\2\2\u016c\u016d\3\2\2\2\u016d") + buf.write("\u016e\b<\2\2\u016ex\3\2\2\2\u016f\u0170\7\61\2\2\u0170") + buf.write("\u0171\7\61\2\2\u0171\u0175\3\2\2\2\u0172\u0174\n\b\2") + buf.write("\2\u0173\u0172\3\2\2\2\u0174\u0177\3\2\2\2\u0175\u0173") + buf.write("\3\2\2\2\u0175\u0176\3\2\2\2\u0176\u0179\3\2\2\2\u0177") + buf.write("\u0175\3\2\2\2\u0178\u017a\7\17\2\2\u0179\u0178\3\2\2") + buf.write("\2\u0179\u017a\3\2\2\2\u017a\u017b\3\2\2\2\u017b\u017c") + buf.write("\7\f\2\2\u017c\u017d\3\2\2\2\u017d\u017e\b=\2\2\u017e") + buf.write("z\3\2\2\2\u017f\u0180\7\61\2\2\u0180\u0181\7,\2\2\u0181") + buf.write("\u0185\3\2\2\2\u0182\u0184\13\2\2\2\u0183\u0182\3\2\2") + buf.write("\2\u0184\u0187\3\2\2\2\u0185\u0186\3\2\2\2\u0185\u0183") + 
buf.write("\3\2\2\2\u0186\u0188\3\2\2\2\u0187\u0185\3\2\2\2\u0188") + buf.write("\u0189\7,\2\2\u0189\u018a\7\61\2\2\u018a\u018b\3\2\2\2") + buf.write("\u018b\u018c\b>\2\2\u018c|\3\2\2\2\u018d\u018e\t\t\2\2") + buf.write("\u018e\u018f\3\2\2\2\u018f\u0190\b?\2\2\u0190~\3\2\2\2") + buf.write("\u0191\u0192\7=\2\2\u0192\u0193\7=\2\2\u0193\u0080\3\2") + buf.write("\2\2\17\2\u0134\u013a\u0141\u0145\u0154\u0159\u015f\u0161") + buf.write("\u0169\u0175\u0179\u0185\3\2\3\2") + return buf.getvalue() + + +class scsLexer(Lexer): + + atn = ATNDeserializer().deserialize(serializedATN()) + + decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] + + T__0 = 1 + T__1 = 2 + T__2 = 3 + T__3 = 4 + T__4 = 5 + T__5 = 6 + T__6 = 7 + T__7 = 8 + T__8 = 9 + T__9 = 10 + T__10 = 11 + T__11 = 12 + T__12 = 13 + T__13 = 14 + T__14 = 15 + T__15 = 16 + T__16 = 17 + T__17 = 18 + T__18 = 19 + T__19 = 20 + T__20 = 21 + T__21 = 22 + T__22 = 23 + T__23 = 24 + T__24 = 25 + T__25 = 26 + T__26 = 27 + T__27 = 28 + T__28 = 29 + T__29 = 30 + T__30 = 31 + T__31 = 32 + T__32 = 33 + T__33 = 34 + T__34 = 35 + T__35 = 36 + T__36 = 37 + T__37 = 38 + T__38 = 39 + T__39 = 40 + T__40 = 41 + T__41 = 42 + T__42 = 43 + T__43 = 44 + T__44 = 45 + T__45 = 46 + T__46 = 47 + T__47 = 48 + ID_SYSTEM = 49 + ALIAS_SYMBOLS = 50 + CONTOUR_BEGIN = 51 + CONTOUR_END = 52 + CONTENT_BODY = 53 + LINK = 54 + EDGE_ATTR = 55 + LINE_TERMINATOR = 56 + LINE_COMMENT = 57 + MULTINE_COMMENT = 58 + WS = 59 + SENTENCE_SEP = 60 + + channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] + + modeNames = [ "DEFAULT_MODE" ] + + literalNames = [ "", + "'_'", "'<>'", "'<=>'", "'_<>'", "'_<=>'", "'>'", "'<'", "'=>'", + "'<='", "'_=>'", "'_<='", "'..>'", "'<..'", "'->'", "'<-'", + "'-|>'", "'<|-'", "'-/>'", "''", "'<~'", "'~|>'", + "'<|~'", "'~/>'", "''", "'_<..'", "'_->'", "'_<-'", + "'_-|>'", "'_<|-'", "'_-/>'", "'_'", "'_<~'", "'_~|>'", + "'_<|~'", "'_~/>'", "'_", + "ID_SYSTEM", "ALIAS_SYMBOLS", "CONTOUR_BEGIN", "CONTOUR_END", + 
"CONTENT_BODY", "LINK", "EDGE_ATTR", "LINE_TERMINATOR", "LINE_COMMENT", + "MULTINE_COMMENT", "WS", "SENTENCE_SEP" ] + + ruleNames = [ "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6", + "T__7", "T__8", "T__9", "T__10", "T__11", "T__12", "T__13", + "T__14", "T__15", "T__16", "T__17", "T__18", "T__19", + "T__20", "T__21", "T__22", "T__23", "T__24", "T__25", + "T__26", "T__27", "T__28", "T__29", "T__30", "T__31", + "T__32", "T__33", "T__34", "T__35", "T__36", "T__37", + "T__38", "T__39", "T__40", "T__41", "T__42", "T__43", + "T__44", "T__45", "T__46", "T__47", "ID_SYSTEM", "ALIAS_SYMBOLS", + "CONTENT_ESCAPED", "CONTENT_SYBMOL", "CONTENT_SYBMOL_FIRST_END", + "CONTOUR_BEGIN", "CONTOUR_END", "CONTENT_BODY", "LINK", + "EDGE_ATTR", "LINE_TERMINATOR", "LINE_COMMENT", "MULTINE_COMMENT", + "WS", "SENTENCE_SEP" ] + + grammarFileName = "scs.g4" + + def __init__(self, input=None, output:TextIO = sys.stdout): + super().__init__(input, output) + self.checkVersion("4.9.2") + self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) + self._actions = None + self._predicates = None + + diff --git a/db/sc/scs/antlr/scsLexer.tokens b/db/sc/scs/antlr/scsLexer.tokens new file mode 100644 index 0000000..6aebcb7 --- /dev/null +++ b/db/sc/scs/antlr/scsLexer.tokens @@ -0,0 +1,111 @@ +T__0=1 +T__1=2 +T__2=3 +T__3=4 +T__4=5 +T__5=6 +T__6=7 +T__7=8 +T__8=9 +T__9=10 +T__10=11 +T__11=12 +T__12=13 +T__13=14 +T__14=15 +T__15=16 +T__16=17 +T__17=18 +T__18=19 +T__19=20 +T__20=21 +T__21=22 +T__22=23 +T__23=24 +T__24=25 +T__25=26 +T__26=27 +T__27=28 +T__28=29 +T__29=30 +T__30=31 +T__31=32 +T__32=33 +T__33=34 +T__34=35 +T__35=36 +T__36=37 +T__37=38 +T__38=39 +T__39=40 +T__40=41 +T__41=42 +T__42=43 +T__43=44 +T__44=45 +T__45=46 +T__46=47 +T__47=48 +ID_SYSTEM=49 +ALIAS_SYMBOLS=50 +CONTOUR_BEGIN=51 +CONTOUR_END=52 +CONTENT_BODY=53 +LINK=54 +EDGE_ATTR=55 +LINE_TERMINATOR=56 +LINE_COMMENT=57 +MULTINE_COMMENT=58 +WS=59 +SENTENCE_SEP=60 +'_'=1 +'<>'=2 +'<=>'=3 
+'_<>'=4 +'_<=>'=5 +'>'=6 +'<'=7 +'=>'=8 +'<='=9 +'_=>'=10 +'_<='=11 +'..>'=12 +'<..'=13 +'->'=14 +'<-'=15 +'-|>'=16 +'<|-'=17 +'-/>'=18 +''=20 +'<~'=21 +'~|>'=22 +'<|~'=23 +'~/>'=24 +''=26 +'_<..'=27 +'_->'=28 +'_<-'=29 +'_-|>'=30 +'_<|-'=31 +'_-/>'=32 +'_'=34 +'_<~'=35 +'_~|>'=36 +'_<|~'=37 +'_~/>'=38 +'_ 5: + from typing import TextIO +else: + from typing.io import TextIO + + +from sc.scs.types import * +from enum import Enum + +def create_token_context(ctx: any) -> TokenContext: + return TokenContext(line=ctx.line, column=ctx.column, text=ctx.text) + +class ConnectorType: + ARC = 0 + EDGE = 1 + + +def serializedATN(): + with StringIO() as buf: + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3>") + buf.write("\u00e2\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") + buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16") + buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23") + buf.write("\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31") + buf.write("\t\31\3\2\5\2\64\n\2\3\2\3\2\3\2\3\3\3\3\3\3\3\3\7\3=") + buf.write("\n\3\f\3\16\3@\13\3\3\3\3\3\3\3\3\4\3\4\3\4\3\5\3\5\3") + buf.write("\5\3\6\3\6\3\6\3\6\3\6\3\6\5\6Q\n\6\3\7\7\7T\n\7\f\7\16") + buf.write("\7W\13\7\3\7\3\7\3\b\3\b\3\b\3\t\3\t\5\t`\n\t\3\n\3\n") + buf.write("\3\n\3\13\3\13\3\13\3\f\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3") + buf.write("\r\5\rq\n\r\3\r\3\r\3\r\3\r\3\16\5\16x\n\16\3\16\3\16") + buf.write("\3\16\3\17\3\17\3\17\7\17\u0080\n\17\f\17\16\17\u0083") + buf.write("\13\17\3\20\3\20\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3") + buf.write("\21\3\21\5\21\u0090\n\21\3\22\3\22\3\22\3\23\3\23\3\23") + buf.write("\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23") + buf.write("\3\23\3\23\3\23\3\23\5\23\u00a7\n\23\3\24\3\24\3\24\5") + buf.write("\24\u00ac\n\24\3\24\3\24\3\24\3\24\5\24\u00b2\n\24\7\24") + buf.write("\u00b4\n\24\f\24\16\24\u00b7\13\24\3\25\3\25\5\25\u00bb") + buf.write("\n\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26\6\26\u00c4\n") + 
buf.write("\26\r\26\16\26\u00c5\3\26\3\26\3\27\3\27\5\27\u00cc\n") + buf.write("\27\3\27\3\27\3\27\3\27\3\30\3\30\3\30\3\30\7\30\u00d6") + buf.write("\n\30\f\30\16\30\u00d9\13\30\3\31\3\31\3\31\6\31\u00de") + buf.write("\n\31\r\31\16\31\u00df\3\31\2\2\32\2\4\6\b\n\f\16\20\22") + buf.write("\24\26\30\32\34\36 \"$&(*,.\60\2\5\3\2\4\r\3\2\16)\4\2") + buf.write("**\63\63\2\u00df\2\63\3\2\2\2\48\3\2\2\2\6D\3\2\2\2\b") + buf.write("G\3\2\2\2\nP\3\2\2\2\fU\3\2\2\2\16Z\3\2\2\2\20_\3\2\2") + buf.write("\2\22a\3\2\2\2\24d\3\2\2\2\26g\3\2\2\2\30l\3\2\2\2\32") + buf.write("w\3\2\2\2\34|\3\2\2\2\36\u0084\3\2\2\2 \u008f\3\2\2\2") + buf.write("\"\u0091\3\2\2\2$\u00a6\3\2\2\2&\u00a8\3\2\2\2(\u00b8") + buf.write("\3\2\2\2*\u00bf\3\2\2\2,\u00c9\3\2\2\2.\u00d1\3\2\2\2") + buf.write("\60\u00dd\3\2\2\2\62\64\7\3\2\2\63\62\3\2\2\2\63\64\3") + buf.write("\2\2\2\64\65\3\2\2\2\65\66\7\67\2\2\66\67\b\2\1\2\67\3") + buf.write("\3\2\2\289\7\65\2\29:\6\3\2\2:>\b\3\1\2;=\5\16\b\2<;\3") + buf.write("\2\2\2=@\3\2\2\2><\3\2\2\2>?\3\2\2\2?A\3\2\2\2@>\3\2\2") + buf.write("\2AB\7\66\2\2BC\b\3\1\2C\5\3\2\2\2DE\t\2\2\2EF\b\4\1\2") + buf.write("F\7\3\2\2\2GH\t\3\2\2HI\b\5\1\2I\t\3\2\2\2JK\5\6\4\2K") + buf.write("L\b\6\1\2LQ\3\2\2\2MN\5\b\5\2NO\b\6\1\2OQ\3\2\2\2PJ\3") + buf.write("\2\2\2PM\3\2\2\2Q\13\3\2\2\2RT\5\16\b\2SR\3\2\2\2TW\3") + buf.write("\2\2\2US\3\2\2\2UV\3\2\2\2VX\3\2\2\2WU\3\2\2\2XY\7\2\2") + buf.write("\3Y\r\3\2\2\2Z[\5\20\t\2[\\\7>\2\2\\\17\3\2\2\2]`\5\26") + buf.write("\f\2^`\5.\30\2_]\3\2\2\2_^\3\2\2\2`\21\3\2\2\2ab\7\64") + buf.write("\2\2bc\b\n\1\2c\23\3\2\2\2de\t\4\2\2ef\b\13\1\2f\25\3") + buf.write("\2\2\2gh\7\64\2\2hi\7+\2\2ij\5$\23\2jk\b\f\1\2k\27\3\2") + buf.write("\2\2lm\7,\2\2mn\5 \21\2np\5\n\6\2oq\5\60\31\2po\3\2\2") + buf.write("\2pq\3\2\2\2qr\3\2\2\2rs\5 \21\2st\7-\2\2tu\b\r\1\2u\31") + buf.write("\3\2\2\2vx\5\60\31\2wv\3\2\2\2wx\3\2\2\2xy\3\2\2\2yz\5") + buf.write("$\23\2z{\b\16\1\2{\33\3\2\2\2|\u0081\5\32\16\2}~\7.\2") + 
buf.write("\2~\u0080\5\32\16\2\177}\3\2\2\2\u0080\u0083\3\2\2\2\u0081") + buf.write("\177\3\2\2\2\u0081\u0082\3\2\2\2\u0082\35\3\2\2\2\u0083") + buf.write("\u0081\3\2\2\2\u0084\u0085\7/\2\2\u0085\u0086\b\20\1\2") + buf.write("\u0086\u0087\5\34\17\2\u0087\u0088\7\60\2\2\u0088\37\3") + buf.write("\2\2\2\u0089\u008a\5\22\n\2\u008a\u008b\b\21\1\2\u008b") + buf.write("\u0090\3\2\2\2\u008c\u008d\5\24\13\2\u008d\u008e\b\21") + buf.write("\1\2\u008e\u0090\3\2\2\2\u008f\u0089\3\2\2\2\u008f\u008c") + buf.write("\3\2\2\2\u0090!\3\2\2\2\u0091\u0092\78\2\2\u0092\u0093") + buf.write("\b\22\1\2\u0093#\3\2\2\2\u0094\u0095\5 \21\2\u0095\u0096") + buf.write("\b\23\1\2\u0096\u00a7\3\2\2\2\u0097\u0098\5\30\r\2\u0098") + buf.write("\u0099\b\23\1\2\u0099\u00a7\3\2\2\2\u009a\u009b\5\36\20") + buf.write("\2\u009b\u009c\b\23\1\2\u009c\u00a7\3\2\2\2\u009d\u009e") + buf.write("\5\4\3\2\u009e\u009f\b\23\1\2\u009f\u00a7\3\2\2\2\u00a0") + buf.write("\u00a1\5\2\2\2\u00a1\u00a2\b\23\1\2\u00a2\u00a7\3\2\2") + buf.write("\2\u00a3\u00a4\5\"\22\2\u00a4\u00a5\b\23\1\2\u00a5\u00a7") + buf.write("\3\2\2\2\u00a6\u0094\3\2\2\2\u00a6\u0097\3\2\2\2\u00a6") + buf.write("\u009a\3\2\2\2\u00a6\u009d\3\2\2\2\u00a6\u00a0\3\2\2\2") + buf.write("\u00a6\u00a3\3\2\2\2\u00a7%\3\2\2\2\u00a8\u00a9\5$\23") + buf.write("\2\u00a9\u00ab\b\24\1\2\u00aa\u00ac\5*\26\2\u00ab\u00aa") + buf.write("\3\2\2\2\u00ab\u00ac\3\2\2\2\u00ac\u00b5\3\2\2\2\u00ad") + buf.write("\u00ae\7.\2\2\u00ae\u00af\5$\23\2\u00af\u00b1\b\24\1\2") + buf.write("\u00b0\u00b2\5*\26\2\u00b1\u00b0\3\2\2\2\u00b1\u00b2\3") + buf.write("\2\2\2\u00b2\u00b4\3\2\2\2\u00b3\u00ad\3\2\2\2\u00b4\u00b7") + buf.write("\3\2\2\2\u00b5\u00b3\3\2\2\2\u00b5\u00b6\3\2\2\2\u00b6") + buf.write("\'\3\2\2\2\u00b7\u00b5\3\2\2\2\u00b8\u00ba\5\n\6\2\u00b9") + buf.write("\u00bb\5\60\31\2\u00ba\u00b9\3\2\2\2\u00ba\u00bb\3\2\2") + buf.write("\2\u00bb\u00bc\3\2\2\2\u00bc\u00bd\5&\24\2\u00bd\u00be") + buf.write("\b\25\1\2\u00be)\3\2\2\2\u00bf\u00c3\7\61\2\2\u00c0\u00c1") + 
buf.write("\5(\25\2\u00c1\u00c2\7>\2\2\u00c2\u00c4\3\2\2\2\u00c3") + buf.write("\u00c0\3\2\2\2\u00c4\u00c5\3\2\2\2\u00c5\u00c3\3\2\2\2") + buf.write("\u00c5\u00c6\3\2\2\2\u00c6\u00c7\3\2\2\2\u00c7\u00c8\7") + buf.write("\62\2\2\u00c8+\3\2\2\2\u00c9\u00cb\5\n\6\2\u00ca\u00cc") + buf.write("\5\60\31\2\u00cb\u00ca\3\2\2\2\u00cb\u00cc\3\2\2\2\u00cc") + buf.write("\u00cd\3\2\2\2\u00cd\u00ce\5&\24\2\u00ce\u00cf\3\2\2\2") + buf.write("\u00cf\u00d0\b\27\1\2\u00d0-\3\2\2\2\u00d1\u00d2\5$\23") + buf.write("\2\u00d2\u00d7\5,\27\2\u00d3\u00d4\7.\2\2\u00d4\u00d6") + buf.write("\5,\27\2\u00d5\u00d3\3\2\2\2\u00d6\u00d9\3\2\2\2\u00d7") + buf.write("\u00d5\3\2\2\2\u00d7\u00d8\3\2\2\2\u00d8/\3\2\2\2\u00d9") + buf.write("\u00d7\3\2\2\2\u00da\u00db\7\63\2\2\u00db\u00dc\79\2\2") + buf.write("\u00dc\u00de\b\31\1\2\u00dd\u00da\3\2\2\2\u00de\u00df") + buf.write("\3\2\2\2\u00df\u00dd\3\2\2\2\u00df\u00e0\3\2\2\2\u00e0") + buf.write("\61\3\2\2\2\24\63>PU_pw\u0081\u008f\u00a6\u00ab\u00b1") + buf.write("\u00b5\u00ba\u00c5\u00cb\u00d7\u00df") + return buf.getvalue() + + +class scsParser ( Parser ): + + grammarFileName = "scs.g4" + + atn = ATNDeserializer().deserialize(serializedATN()) + + decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] + + sharedContextCache = PredictionContextCache() + + literalNames = [ "", "'_'", "'<>'", "'<=>'", "'_<>'", "'_<=>'", + "'>'", "'<'", "'=>'", "'<='", "'_=>'", "'_<='", "'..>'", + "'<..'", "'->'", "'<-'", "'-|>'", "'<|-'", "'-/>'", + "''", "'<~'", "'~|>'", "'<|~'", "'~/>'", + "''", "'_<..'", "'_->'", "'_<-'", "'_-|>'", + "'_<|-'", "'_-/>'", "'_'", "'_<~'", "'_~|>'", + "'_<|~'", "'_~/>'", "'_", + "", "'[*'", "'*]'", "", "", + "", "", "", "", + "", "';;'" ] + + symbolicNames = [ "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "ID_SYSTEM", "ALIAS_SYMBOLS", 
"CONTOUR_BEGIN", + "CONTOUR_END", "CONTENT_BODY", "LINK", "EDGE_ATTR", + "LINE_TERMINATOR", "LINE_COMMENT", "MULTINE_COMMENT", + "WS", "SENTENCE_SEP" ] + + RULE_content = 0 + RULE_contour = 1 + RULE_connector_edge = 2 + RULE_connector_arc = 3 + RULE_connector = 4 + RULE_syntax = 5 + RULE_sentence_wrap = 6 + RULE_sentence = 7 + RULE_ifdf_alias = 8 + RULE_idtf_system = 9 + RULE_sentence_assign = 10 + RULE_idtf_edge = 11 + RULE_idtf_set_item = 12 + RULE_idtf_set_item_list = 13 + RULE_idtf_set = 14 + RULE_idtf_atomic = 15 + RULE_idtf_url = 16 + RULE_idtf_common = 17 + RULE_idtf_list = 18 + RULE_internal_sentence = 19 + RULE_internal_sentence_list = 20 + RULE_sentence_lvl_4_list_item = 21 + RULE_sentence_lvl_common = 22 + RULE_attr_list = 23 + + ruleNames = [ "content", "contour", "connector_edge", "connector_arc", + "connector", "syntax", "sentence_wrap", "sentence", "ifdf_alias", + "idtf_system", "sentence_assign", "idtf_edge", "idtf_set_item", + "idtf_set_item_list", "idtf_set", "idtf_atomic", "idtf_url", + "idtf_common", "idtf_list", "internal_sentence", "internal_sentence_list", + "sentence_lvl_4_list_item", "sentence_lvl_common", "attr_list" ] + + EOF = Token.EOF + T__0=1 + T__1=2 + T__2=3 + T__3=4 + T__4=5 + T__5=6 + T__6=7 + T__7=8 + T__8=9 + T__9=10 + T__10=11 + T__11=12 + T__12=13 + T__13=14 + T__14=15 + T__15=16 + T__16=17 + T__17=18 + T__18=19 + T__19=20 + T__20=21 + T__21=22 + T__22=23 + T__23=24 + T__24=25 + T__25=26 + T__26=27 + T__27=28 + T__28=29 + T__29=30 + T__30=31 + T__31=32 + T__32=33 + T__33=34 + T__34=35 + T__35=36 + T__36=37 + T__37=38 + T__38=39 + T__39=40 + T__40=41 + T__41=42 + T__42=43 + T__43=44 + T__44=45 + T__45=46 + T__46=47 + T__47=48 + ID_SYSTEM=49 + ALIAS_SYMBOLS=50 + CONTOUR_BEGIN=51 + CONTOUR_END=52 + CONTENT_BODY=53 + LINK=54 + EDGE_ATTR=55 + LINE_TERMINATOR=56 + LINE_COMMENT=57 + MULTINE_COMMENT=58 + WS=59 + SENTENCE_SEP=60 + + def __init__(self, input:TokenStream, output:TextIO = sys.stdout): + super().__init__(input, output) + 
self.checkVersion("4.9.2") + self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache) + self._predicates = None + + + + + class ContentContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.el = None + self.v = None # Token + self._CONTENT_BODY = None # Token + + def CONTENT_BODY(self): + return self.getToken(scsParser.CONTENT_BODY, 0) + + def getRuleIndex(self): + return scsParser.RULE_content + + + + + def content(self): + + localctx = scsParser.ContentContext(self, self._ctx, self.state) + self.enterRule(localctx, 0, self.RULE_content) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 49 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==scsParser.T__0: + self.state = 48 + localctx.v = self.match(scsParser.T__0) + + + self.state = 51 + localctx._CONTENT_BODY = self.match(scsParser.CONTENT_BODY) + + token_context = create_token_context(localctx._CONTENT_BODY) + localctx.el = self._impl.create_link(token_context, (None if localctx._CONTENT_BODY is None else localctx._CONTENT_BODY.text)[1:-1], Link.Type.STRING, localctx.v is not None) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ContourContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.el = None + self._CONTOUR_BEGIN = None # Token + + def CONTOUR_BEGIN(self): + return self.getToken(scsParser.CONTOUR_BEGIN, 0) + + def CONTOUR_END(self): + return self.getToken(scsParser.CONTOUR_END, 0) + + def sentence_wrap(self, i:int=None): + if i is None: + return 
self.getTypedRuleContexts(scsParser.Sentence_wrapContext) + else: + return self.getTypedRuleContext(scsParser.Sentence_wrapContext,i) + + + def getRuleIndex(self): + return scsParser.RULE_contour + + + + + def contour(self): + + localctx = scsParser.ContourContext(self, self._ctx, self.state) + self.enterRule(localctx, 2, self.RULE_contour) + count = 1 + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 54 + localctx._CONTOUR_BEGIN = self.match(scsParser.CONTOUR_BEGIN) + self.state = 55 + if not count > 0: + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "count > 0") + self._impl.start_contour() + + self.state = 60 + self._errHandler.sync(self) + _la = self._input.LA(1) + while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << scsParser.T__0) | (1 << scsParser.T__39) | (1 << scsParser.T__41) | (1 << scsParser.T__44) | (1 << scsParser.ID_SYSTEM) | (1 << scsParser.ALIAS_SYMBOLS) | (1 << scsParser.CONTOUR_BEGIN) | (1 << scsParser.CONTENT_BODY) | (1 << scsParser.LINK))) != 0): + self.state = 57 + self.sentence_wrap() + self.state = 62 + self._errHandler.sync(self) + _la = self._input.LA(1) + + self.state = 63 + self.match(scsParser.CONTOUR_END) + + count -= 1 + if count == 0: + contour = self._impl.create_node(create_token_context(localctx._CONTOUR_BEGIN)) + self._impl.end_contour(contour) + localctx.el = contour + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Connector_edgeContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.el = None + self.symbol = None # Token + + + def getRuleIndex(self): + return scsParser.RULE_connector_edge + + + + + def connector_edge(self): + + localctx = 
scsParser.Connector_edgeContext(self, self._ctx, self.state) + self.enterRule(localctx, 4, self.RULE_connector_edge) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 66 + localctx.symbol = self._input.LT(1) + _la = self._input.LA(1) + if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << scsParser.T__1) | (1 << scsParser.T__2) | (1 << scsParser.T__3) | (1 << scsParser.T__4) | (1 << scsParser.T__5) | (1 << scsParser.T__6) | (1 << scsParser.T__7) | (1 << scsParser.T__8) | (1 << scsParser.T__9) | (1 << scsParser.T__10))) != 0)): + localctx.symbol = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + localctx.el = self._impl.create_edge(create_token_context(localctx.symbol), (None if localctx.symbol is None else localctx.symbol.text)) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Connector_arcContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.el = None + self.symbol = None # Token + + + def getRuleIndex(self): + return scsParser.RULE_connector_arc + + + + + def connector_arc(self): + + localctx = scsParser.Connector_arcContext(self, self._ctx, self.state) + self.enterRule(localctx, 6, self.RULE_connector_arc) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 69 + localctx.symbol = self._input.LT(1) + _la = self._input.LA(1) + if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << scsParser.T__11) | (1 << scsParser.T__12) | (1 << scsParser.T__13) | (1 << scsParser.T__14) | (1 << scsParser.T__15) | (1 << scsParser.T__16) | (1 << scsParser.T__17) | (1 << scsParser.T__18) | (1 << scsParser.T__19) | (1 << scsParser.T__20) | (1 << scsParser.T__21) | (1 << 
scsParser.T__22) | (1 << scsParser.T__23) | (1 << scsParser.T__24) | (1 << scsParser.T__25) | (1 << scsParser.T__26) | (1 << scsParser.T__27) | (1 << scsParser.T__28) | (1 << scsParser.T__29) | (1 << scsParser.T__30) | (1 << scsParser.T__31) | (1 << scsParser.T__32) | (1 << scsParser.T__33) | (1 << scsParser.T__34) | (1 << scsParser.T__35) | (1 << scsParser.T__36) | (1 << scsParser.T__37) | (1 << scsParser.T__38))) != 0)): + localctx.symbol = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + localctx.el = self._impl.create_arc(create_token_context(localctx.symbol), (None if localctx.symbol is None else localctx.symbol.text)) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ConnectorContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.el = None + self._connector_edge = None # Connector_edgeContext + self._connector_arc = None # Connector_arcContext + + def connector_edge(self): + return self.getTypedRuleContext(scsParser.Connector_edgeContext,0) + + + def connector_arc(self): + return self.getTypedRuleContext(scsParser.Connector_arcContext,0) + + + def getRuleIndex(self): + return scsParser.RULE_connector + + + + + def connector(self): + + localctx = scsParser.ConnectorContext(self, self._ctx, self.state) + self.enterRule(localctx, 8, self.RULE_connector) + try: + self.state = 78 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [scsParser.T__1, scsParser.T__2, scsParser.T__3, scsParser.T__4, scsParser.T__5, scsParser.T__6, scsParser.T__7, scsParser.T__8, scsParser.T__9, scsParser.T__10]: + self.enterOuterAlt(localctx, 1) + self.state = 72 + localctx._connector_edge = self.connector_edge() + 
localctx.el = localctx._connector_edge.el + pass + elif token in [scsParser.T__11, scsParser.T__12, scsParser.T__13, scsParser.T__14, scsParser.T__15, scsParser.T__16, scsParser.T__17, scsParser.T__18, scsParser.T__19, scsParser.T__20, scsParser.T__21, scsParser.T__22, scsParser.T__23, scsParser.T__24, scsParser.T__25, scsParser.T__26, scsParser.T__27, scsParser.T__28, scsParser.T__29, scsParser.T__30, scsParser.T__31, scsParser.T__32, scsParser.T__33, scsParser.T__34, scsParser.T__35, scsParser.T__36, scsParser.T__37, scsParser.T__38]: + self.enterOuterAlt(localctx, 2) + self.state = 75 + localctx._connector_arc = self.connector_arc() + localctx.el = localctx._connector_arc.el + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class SyntaxContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def EOF(self): + return self.getToken(scsParser.EOF, 0) + + def sentence_wrap(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(scsParser.Sentence_wrapContext) + else: + return self.getTypedRuleContext(scsParser.Sentence_wrapContext,i) + + + def getRuleIndex(self): + return scsParser.RULE_syntax + + + + + def syntax(self): + + localctx = scsParser.SyntaxContext(self, self._ctx, self.state) + self.enterRule(localctx, 10, self.RULE_syntax) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 83 + self._errHandler.sync(self) + _la = self._input.LA(1) + while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << scsParser.T__0) | (1 << scsParser.T__39) | (1 << scsParser.T__41) | (1 << scsParser.T__44) | (1 << scsParser.ID_SYSTEM) | (1 << scsParser.ALIAS_SYMBOLS) | (1 << scsParser.CONTOUR_BEGIN) | 
(1 << scsParser.CONTENT_BODY) | (1 << scsParser.LINK))) != 0): + self.state = 80 + self.sentence_wrap() + self.state = 85 + self._errHandler.sync(self) + _la = self._input.LA(1) + + self.state = 86 + self.match(scsParser.EOF) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Sentence_wrapContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def sentence(self): + return self.getTypedRuleContext(scsParser.SentenceContext,0) + + + def SENTENCE_SEP(self): + return self.getToken(scsParser.SENTENCE_SEP, 0) + + def getRuleIndex(self): + return scsParser.RULE_sentence_wrap + + + + + def sentence_wrap(self): + + localctx = scsParser.Sentence_wrapContext(self, self._ctx, self.state) + self.enterRule(localctx, 12, self.RULE_sentence_wrap) + try: + self.enterOuterAlt(localctx, 1) + self.state = 88 + self.sentence() + self.state = 89 + self.match(scsParser.SENTENCE_SEP) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class SentenceContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def sentence_assign(self): + return self.getTypedRuleContext(scsParser.Sentence_assignContext,0) + + + def sentence_lvl_common(self): + return self.getTypedRuleContext(scsParser.Sentence_lvl_commonContext,0) + + + def getRuleIndex(self): + return scsParser.RULE_sentence + + + + + def sentence(self): + + localctx = scsParser.SentenceContext(self, self._ctx, self.state) + self.enterRule(localctx, 14, 
self.RULE_sentence) + try: + self.state = 93 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,4,self._ctx) + if la_ == 1: + self.enterOuterAlt(localctx, 1) + self.state = 91 + self.sentence_assign() + pass + + elif la_ == 2: + self.enterOuterAlt(localctx, 2) + self.state = 92 + self.sentence_lvl_common() + pass + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Ifdf_aliasContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.el = None + self._ALIAS_SYMBOLS = None # Token + + def ALIAS_SYMBOLS(self): + return self.getToken(scsParser.ALIAS_SYMBOLS, 0) + + def getRuleIndex(self): + return scsParser.RULE_ifdf_alias + + + + + def ifdf_alias(self): + + localctx = scsParser.Ifdf_aliasContext(self, self._ctx, self.state) + self.enterRule(localctx, 16, self.RULE_ifdf_alias) + try: + self.enterOuterAlt(localctx, 1) + self.state = 95 + localctx._ALIAS_SYMBOLS = self.match(scsParser.ALIAS_SYMBOLS) + + localctx.el = self._impl.create_alias(create_token_context(localctx._ALIAS_SYMBOLS)) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Idtf_systemContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.el = None + self.value = None # Token + + def ID_SYSTEM(self): + return self.getToken(scsParser.ID_SYSTEM, 0) + + def getRuleIndex(self): + return scsParser.RULE_idtf_system + + + + + def idtf_system(self): + + localctx = 
scsParser.Idtf_systemContext(self, self._ctx, self.state) + self.enterRule(localctx, 18, self.RULE_idtf_system) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 98 + localctx.value = self._input.LT(1) + _la = self._input.LA(1) + if not(_la==scsParser.T__39 or _la==scsParser.ID_SYSTEM): + localctx.value = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + + localctx.el = self._impl.create_node(create_token_context(localctx.value)) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Sentence_assignContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self._ALIAS_SYMBOLS = None # Token + self._idtf_common = None # Idtf_commonContext + + def ALIAS_SYMBOLS(self): + return self.getToken(scsParser.ALIAS_SYMBOLS, 0) + + def idtf_common(self): + return self.getTypedRuleContext(scsParser.Idtf_commonContext,0) + + + def getRuleIndex(self): + return scsParser.RULE_sentence_assign + + + + + def sentence_assign(self): + + localctx = scsParser.Sentence_assignContext(self, self._ctx, self.state) + self.enterRule(localctx, 20, self.RULE_sentence_assign) + try: + self.enterOuterAlt(localctx, 1) + self.state = 101 + localctx._ALIAS_SYMBOLS = self.match(scsParser.ALIAS_SYMBOLS) + self.state = 102 + self.match(scsParser.T__40) + self.state = 103 + localctx._idtf_common = self.idtf_common() + + + context = create_token_context(localctx._ALIAS_SYMBOLS) + self._impl.define_alias(self._impl.create_alias(context), localctx._idtf_common.el) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + 
return localctx + + + class Idtf_edgeContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.el = None + self.src = None # Idtf_atomicContext + self._connector = None # ConnectorContext + self.attr = None # Attr_listContext + self.trg = None # Idtf_atomicContext + + def connector(self): + return self.getTypedRuleContext(scsParser.ConnectorContext,0) + + + def idtf_atomic(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(scsParser.Idtf_atomicContext) + else: + return self.getTypedRuleContext(scsParser.Idtf_atomicContext,i) + + + def attr_list(self): + return self.getTypedRuleContext(scsParser.Attr_listContext,0) + + + def getRuleIndex(self): + return scsParser.RULE_idtf_edge + + + + + def idtf_edge(self): + + localctx = scsParser.Idtf_edgeContext(self, self._ctx, self.state) + self.enterRule(localctx, 22, self.RULE_idtf_edge) + try: + self.enterOuterAlt(localctx, 1) + self.state = 106 + self.match(scsParser.T__41) + self.state = 107 + localctx.src = self.idtf_atomic() + self.state = 108 + localctx._connector = self.connector() + self.state = 110 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,5,self._ctx) + if la_ == 1: + self.state = 109 + localctx.attr = self.attr_list() + + + self.state = 112 + localctx.trg = self.idtf_atomic() + self.state = 113 + self.match(scsParser.T__42) + + self._impl.append_triple(localctx.src.el, localctx._connector.el, localctx.trg.el) + localctx.el = localctx._connector.el + + if localctx.attr is not None: + for a, e in localctx.attr.items: + self._impl.append_triple(a, e, localctx._connector.el) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Idtf_set_itemContext(ParserRuleContext): + 
__slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1, set:Element=None): + super().__init__(parent, invokingState) + self.parser = parser + self.set = None + self.attr = None # Attr_listContext + self.idtf = None # Idtf_commonContext + self.set = set + + def idtf_common(self): + return self.getTypedRuleContext(scsParser.Idtf_commonContext,0) + + + def attr_list(self): + return self.getTypedRuleContext(scsParser.Attr_listContext,0) + + + def getRuleIndex(self): + return scsParser.RULE_idtf_set_item + + + + + def idtf_set_item(self, set:Element): + + localctx = scsParser.Idtf_set_itemContext(self, self._ctx, self.state, set) + self.enterRule(localctx, 24, self.RULE_idtf_set_item) + try: + self.enterOuterAlt(localctx, 1) + self.state = 117 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,6,self._ctx) + if la_ == 1: + self.state = 116 + localctx.attr = self.attr_list() + + + self.state = 119 + localctx.idtf = self.idtf_common() + + edge = self._impl.create_arc(localctx.idtf.el.ctx.clone(), '->') + self._impl.append_triple(localctx.set, edge, localctx.idtf.el) + if localctx.attr is not None: + for a, e in localctx.attr.items: + self._impl.append_triple(a, e, edge) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Idtf_set_item_listContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1, set:Element=None): + super().__init__(parent, invokingState) + self.parser = parser + self.set = None + self.set = set + + def idtf_set_item(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(scsParser.Idtf_set_itemContext) + else: + return self.getTypedRuleContext(scsParser.Idtf_set_itemContext,i) + + + def getRuleIndex(self): + return 
scsParser.RULE_idtf_set_item_list + + + + + def idtf_set_item_list(self, set:Element): + + localctx = scsParser.Idtf_set_item_listContext(self, self._ctx, self.state, set) + self.enterRule(localctx, 26, self.RULE_idtf_set_item_list) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 122 + self.idtf_set_item(set) + self.state = 127 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==scsParser.T__43: + self.state = 123 + self.match(scsParser.T__43) + self.state = 124 + self.idtf_set_item(set) + self.state = 129 + self._errHandler.sync(self) + _la = self._input.LA(1) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Idtf_setContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.el = None + self.t = None # Token + + def idtf_set_item_list(self): + return self.getTypedRuleContext(scsParser.Idtf_set_item_listContext,0) + + + def getRuleIndex(self): + return scsParser.RULE_idtf_set + + + + + def idtf_set(self): + + localctx = scsParser.Idtf_setContext(self, self._ctx, self.state) + self.enterRule(localctx, 28, self.RULE_idtf_set) + try: + self.enterOuterAlt(localctx, 1) + self.state = 130 + localctx.t = self.match(scsParser.T__44) + + context = create_token_context(localctx.t) + localctx.el = self._impl.create_node(context) + + self.state = 132 + self.idtf_set_item_list(localctx.el) + self.state = 133 + self.match(scsParser.T__45) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Idtf_atomicContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, 
parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.el = None + self._ifdf_alias = None # Ifdf_aliasContext + self._idtf_system = None # Idtf_systemContext + + def ifdf_alias(self): + return self.getTypedRuleContext(scsParser.Ifdf_aliasContext,0) + + + def idtf_system(self): + return self.getTypedRuleContext(scsParser.Idtf_systemContext,0) + + + def getRuleIndex(self): + return scsParser.RULE_idtf_atomic + + + + + def idtf_atomic(self): + + localctx = scsParser.Idtf_atomicContext(self, self._ctx, self.state) + self.enterRule(localctx, 30, self.RULE_idtf_atomic) + try: + self.state = 141 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [scsParser.ALIAS_SYMBOLS]: + self.enterOuterAlt(localctx, 1) + self.state = 135 + localctx._ifdf_alias = self.ifdf_alias() + localctx.el = localctx._ifdf_alias.el + pass + elif token in [scsParser.T__39, scsParser.ID_SYSTEM]: + self.enterOuterAlt(localctx, 2) + self.state = 138 + localctx._idtf_system = self.idtf_system() + localctx.el = localctx._idtf_system.el + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Idtf_urlContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.el = None + self._LINK = None # Token + + def LINK(self): + return self.getToken(scsParser.LINK, 0) + + def getRuleIndex(self): + return scsParser.RULE_idtf_url + + + + + def idtf_url(self): + + localctx = scsParser.Idtf_urlContext(self, self._ctx, self.state) + self.enterRule(localctx, 32, self.RULE_idtf_url) + try: + self.enterOuterAlt(localctx, 1) + self.state = 143 + localctx._LINK = self.match(scsParser.LINK) + + 
context = create_token_context(localctx._LINK) + localctx.el = self._impl.create_link(context, (None if localctx._LINK is None else localctx._LINK.text)[1:-1], Link.Type.URL, False) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Idtf_commonContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.el = None + self._idtf_atomic = None # Idtf_atomicContext + self._idtf_edge = None # Idtf_edgeContext + self._idtf_set = None # Idtf_setContext + self._contour = None # ContourContext + self._content = None # ContentContext + self._idtf_url = None # Idtf_urlContext + + def idtf_atomic(self): + return self.getTypedRuleContext(scsParser.Idtf_atomicContext,0) + + + def idtf_edge(self): + return self.getTypedRuleContext(scsParser.Idtf_edgeContext,0) + + + def idtf_set(self): + return self.getTypedRuleContext(scsParser.Idtf_setContext,0) + + + def contour(self): + return self.getTypedRuleContext(scsParser.ContourContext,0) + + + def content(self): + return self.getTypedRuleContext(scsParser.ContentContext,0) + + + def idtf_url(self): + return self.getTypedRuleContext(scsParser.Idtf_urlContext,0) + + + def getRuleIndex(self): + return scsParser.RULE_idtf_common + + + + + def idtf_common(self): + + localctx = scsParser.Idtf_commonContext(self, self._ctx, self.state) + self.enterRule(localctx, 34, self.RULE_idtf_common) + try: + self.state = 164 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [scsParser.T__39, scsParser.ID_SYSTEM, scsParser.ALIAS_SYMBOLS]: + self.enterOuterAlt(localctx, 1) + self.state = 146 + localctx._idtf_atomic = self.idtf_atomic() + localctx.el = localctx._idtf_atomic.el + pass + elif token in [scsParser.T__41]: + 
self.enterOuterAlt(localctx, 2) + self.state = 149 + localctx._idtf_edge = self.idtf_edge() + localctx.el = localctx._idtf_edge.el + pass + elif token in [scsParser.T__44]: + self.enterOuterAlt(localctx, 3) + self.state = 152 + localctx._idtf_set = self.idtf_set() + localctx.el = localctx._idtf_set.el + pass + elif token in [scsParser.CONTOUR_BEGIN]: + self.enterOuterAlt(localctx, 4) + self.state = 155 + localctx._contour = self.contour() + localctx.el = localctx._contour.el + pass + elif token in [scsParser.T__0, scsParser.CONTENT_BODY]: + self.enterOuterAlt(localctx, 5) + self.state = 158 + localctx._content = self.content() + localctx.el = localctx._content.el + pass + elif token in [scsParser.LINK]: + self.enterOuterAlt(localctx, 6) + self.state = 161 + localctx._idtf_url = self.idtf_url() + localctx.el = localctx._idtf_url.el + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Idtf_listContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.items = None + self.first = None # Idtf_commonContext + self.second = None # Idtf_commonContext + + def idtf_common(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(scsParser.Idtf_commonContext) + else: + return self.getTypedRuleContext(scsParser.Idtf_commonContext,i) + + + def internal_sentence_list(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(scsParser.Internal_sentence_listContext) + else: + return self.getTypedRuleContext(scsParser.Internal_sentence_listContext,i) + + + def getRuleIndex(self): + return scsParser.RULE_idtf_list + + + + + def idtf_list(self): + + localctx = scsParser.Idtf_listContext(self, self._ctx, self.state) + 
self.enterRule(localctx, 36, self.RULE_idtf_list) + localctx.items = [] + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 166 + localctx.first = self.idtf_common() + localctx.items.append(localctx.first.el) + self.state = 169 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==scsParser.T__46: + self.state = 168 + self.internal_sentence_list(localctx.first.el) + + + self.state = 179 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,12,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: + self.state = 171 + self.match(scsParser.T__43) + self.state = 172 + localctx.second = self.idtf_common() + localctx.items.append(localctx.second.el) + self.state = 175 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==scsParser.T__46: + self.state = 174 + self.internal_sentence_list(localctx.first.el) + + + self.state = 181 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,12,self._ctx) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Internal_sentenceContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1, src:Element=None): + super().__init__(parent, invokingState) + self.parser = parser + self.src = None + self.c = None # ConnectorContext + self.attr = None # Attr_listContext + self.target = None # Idtf_listContext + self.src = src + + def connector(self): + return self.getTypedRuleContext(scsParser.ConnectorContext,0) + + + def idtf_list(self): + return self.getTypedRuleContext(scsParser.Idtf_listContext,0) + + + def attr_list(self): + return self.getTypedRuleContext(scsParser.Attr_listContext,0) + + + def getRuleIndex(self): + return scsParser.RULE_internal_sentence + + + + + def internal_sentence(self, 
src:Element): + + localctx = scsParser.Internal_sentenceContext(self, self._ctx, self.state, src) + self.enterRule(localctx, 38, self.RULE_internal_sentence) + try: + self.enterOuterAlt(localctx, 1) + self.state = 182 + localctx.c = self.connector() + self.state = 184 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,13,self._ctx) + if la_ == 1: + self.state = 183 + localctx.attr = self.attr_list() + + + self.state = 186 + localctx.target = self.idtf_list() + + + for t in localctx.target.items: + edge = None + if isinstance(localctx.c.el, Edge): + edge = self._impl.create_edge(localctx.c.el.ctx, localctx.c.el.connector) + else: + edge = self._impl.create_arc(localctx.c.el.ctx, localctx.c.el.connector) + self._impl.append_triple(localctx.src, edge, t) + if localctx.attr is not None: + for a, e in localctx.attr.items: + self._impl.append_triple(a, e, edge) + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Internal_sentence_listContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1, src:Element=None): + super().__init__(parent, invokingState) + self.parser = parser + self.src = None + self.src = src + + def internal_sentence(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(scsParser.Internal_sentenceContext) + else: + return self.getTypedRuleContext(scsParser.Internal_sentenceContext,i) + + + def SENTENCE_SEP(self, i:int=None): + if i is None: + return self.getTokens(scsParser.SENTENCE_SEP) + else: + return self.getToken(scsParser.SENTENCE_SEP, i) + + def getRuleIndex(self): + return scsParser.RULE_internal_sentence_list + + + + + def internal_sentence_list(self, src:Element): + + localctx = scsParser.Internal_sentence_listContext(self, self._ctx, self.state, src) + self.enterRule(localctx, 
40, self.RULE_internal_sentence_list) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 189 + self.match(scsParser.T__46) + self.state = 193 + self._errHandler.sync(self) + _la = self._input.LA(1) + while True: + self.state = 190 + self.internal_sentence(src) + self.state = 191 + self.match(scsParser.SENTENCE_SEP) + self.state = 195 + self._errHandler.sync(self) + _la = self._input.LA(1) + if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << scsParser.T__1) | (1 << scsParser.T__2) | (1 << scsParser.T__3) | (1 << scsParser.T__4) | (1 << scsParser.T__5) | (1 << scsParser.T__6) | (1 << scsParser.T__7) | (1 << scsParser.T__8) | (1 << scsParser.T__9) | (1 << scsParser.T__10) | (1 << scsParser.T__11) | (1 << scsParser.T__12) | (1 << scsParser.T__13) | (1 << scsParser.T__14) | (1 << scsParser.T__15) | (1 << scsParser.T__16) | (1 << scsParser.T__17) | (1 << scsParser.T__18) | (1 << scsParser.T__19) | (1 << scsParser.T__20) | (1 << scsParser.T__21) | (1 << scsParser.T__22) | (1 << scsParser.T__23) | (1 << scsParser.T__24) | (1 << scsParser.T__25) | (1 << scsParser.T__26) | (1 << scsParser.T__27) | (1 << scsParser.T__28) | (1 << scsParser.T__29) | (1 << scsParser.T__30) | (1 << scsParser.T__31) | (1 << scsParser.T__32) | (1 << scsParser.T__33) | (1 << scsParser.T__34) | (1 << scsParser.T__35) | (1 << scsParser.T__36) | (1 << scsParser.T__37) | (1 << scsParser.T__38))) != 0)): + break + + self.state = 197 + self.match(scsParser.T__47) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Sentence_lvl_4_list_itemContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1, src:Element=None): + super().__init__(parent, invokingState) + self.parser = parser + self.src = None + self.c = None # ConnectorContext + self.attr = None 
# Attr_listContext + self.target = None # Idtf_listContext + self.src = src + + def connector(self): + return self.getTypedRuleContext(scsParser.ConnectorContext,0) + + + def idtf_list(self): + return self.getTypedRuleContext(scsParser.Idtf_listContext,0) + + + def attr_list(self): + return self.getTypedRuleContext(scsParser.Attr_listContext,0) + + + def getRuleIndex(self): + return scsParser.RULE_sentence_lvl_4_list_item + + + + + def sentence_lvl_4_list_item(self, src:Element): + + localctx = scsParser.Sentence_lvl_4_list_itemContext(self, self._ctx, self.state, src) + self.enterRule(localctx, 42, self.RULE_sentence_lvl_4_list_item) + try: + self.enterOuterAlt(localctx, 1) + self.state = 199 + localctx.c = self.connector() + self.state = 201 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,15,self._ctx) + if la_ == 1: + self.state = 200 + localctx.attr = self.attr_list() + + + self.state = 203 + localctx.target = self.idtf_list() + + for t in localctx.target.items: + edge = None + if isinstance(localctx.c.el, Edge): + edge = self._impl.create_edge(localctx.c.el.ctx, localctx.c.el.connector) + else: + edge = self._impl.create_arc(localctx.c.el.ctx, localctx.c.el.connector) + + self._impl.append_triple(localctx.src, edge, t) + if localctx.attr is not None: + for a, e in localctx.attr.items: + self._impl.append_triple(a, e, edge) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Sentence_lvl_commonContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self._idtf_common = None # Idtf_commonContext + + def idtf_common(self): + return self.getTypedRuleContext(scsParser.Idtf_commonContext,0) + + + def sentence_lvl_4_list_item(self, i:int=None): + 
if i is None: + return self.getTypedRuleContexts(scsParser.Sentence_lvl_4_list_itemContext) + else: + return self.getTypedRuleContext(scsParser.Sentence_lvl_4_list_itemContext,i) + + + def getRuleIndex(self): + return scsParser.RULE_sentence_lvl_common + + + + + def sentence_lvl_common(self): + + localctx = scsParser.Sentence_lvl_commonContext(self, self._ctx, self.state) + self.enterRule(localctx, 44, self.RULE_sentence_lvl_common) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 207 + localctx._idtf_common = self.idtf_common() + self.state = 208 + self.sentence_lvl_4_list_item(localctx._idtf_common.el) + self.state = 213 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==scsParser.T__43: + self.state = 209 + self.match(scsParser.T__43) + self.state = 210 + self.sentence_lvl_4_list_item(localctx._idtf_common.el) + self.state = 215 + self._errHandler.sync(self) + _la = self._input.LA(1) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Attr_listContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.items = None + self._ID_SYSTEM = None # Token + self._EDGE_ATTR = None # Token + + def ID_SYSTEM(self, i:int=None): + if i is None: + return self.getTokens(scsParser.ID_SYSTEM) + else: + return self.getToken(scsParser.ID_SYSTEM, i) + + def EDGE_ATTR(self, i:int=None): + if i is None: + return self.getTokens(scsParser.EDGE_ATTR) + else: + return self.getToken(scsParser.EDGE_ATTR, i) + + def getRuleIndex(self): + return scsParser.RULE_attr_list + + + + + def attr_list(self): + + localctx = scsParser.Attr_listContext(self, self._ctx, self.state) + self.enterRule(localctx, 46, self.RULE_attr_list) + localctx.items = [] + 
try: + self.enterOuterAlt(localctx, 1) + self.state = 219 + self._errHandler.sync(self) + _alt = 1 + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt == 1: + self.state = 216 + localctx._ID_SYSTEM = self.match(scsParser.ID_SYSTEM) + self.state = 217 + localctx._EDGE_ATTR = self.match(scsParser.EDGE_ATTR) + + node = self._impl.create_node(create_token_context(localctx._ID_SYSTEM)) + edge = None + connector = "->" if (None if localctx._EDGE_ATTR is None else localctx._EDGE_ATTR.text) == ":" else "_->" + edge = self._impl.create_arc(create_token_context(localctx._EDGE_ATTR), connector) + + localctx.items.append((node, edge)) + + + else: + raise NoViableAltException(self) + self.state = 221 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,17,self._ctx) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + + def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int): + if self._predicates == None: + self._predicates = dict() + self._predicates[1] = self.contour_sempred + pred = self._predicates.get(ruleIndex, None) + if pred is None: + raise Exception("No predicate with index:" + str(ruleIndex)) + else: + return pred(localctx, predIndex) + + def contour_sempred(self, localctx:ContourContext, predIndex:int): + if predIndex == 0: + return count > 0 + + + + + diff --git a/db/sc/scs/error.py b/db/sc/scs/error.py new file mode 100644 index 0000000..dca6a1a --- /dev/null +++ b/db/sc/scs/error.py @@ -0,0 +1,7 @@ + +class AlreadyExistError(Exception): + pass + + +class UnsupportedError(Exception): + pass diff --git a/db/sc/scs/parse_issue.py b/db/sc/scs/parse_issue.py new file mode 100644 index 0000000..01a1503 --- /dev/null +++ b/db/sc/scs/parse_issue.py @@ -0,0 +1,19 @@ +from enum import Enum + + +class ParseIssue: + class Type(Enum): + WARNING = 1 + ERROR = 2 + + def __init__(self, 
line: int, char_pos: int, token: str, msg: str, type: Type) -> None: + self._line = line + self._char_pos = char_pos + self._token = token + self._msg = msg + self._type = type + + def __repr__(self) -> str: + token = self._token if self._token is not None else "" + return (f"{self._type.name}: {self._msg}. " + f"Line {self._line}:{self._char_pos} - '{token}'") diff --git a/db/sc/scs/parser.py b/db/sc/scs/parser.py new file mode 100644 index 0000000..35e9e68 --- /dev/null +++ b/db/sc/scs/parser.py @@ -0,0 +1,116 @@ +import antlr4 + +from antlr4.error.Errors import ParseCancellationException +from antlr4.error.ErrorListener import ErrorListener, ConsoleErrorListener +from antlr4.error.ErrorStrategy import BailErrorStrategy + +from enum import Enum +from typing import List, Tuple + + +from .antlr import SCsLexerAntlr, SCsParserAntlr +from .parse_issue import ParseIssue +from .parser_impl import SCsParserImpl, Triple + + +class SCsParser: + + class SyntaxErrorListener(ErrorListener): + + def __init__(self) -> None: + super().__init__() + self._errors = [] + + @property + def errors(self): + return self._errors + + def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e): + error = ParseIssue( + line=line, + char_pos=column, + token=e.offendingToken.text if e.offendingToken else None, + msg=msg, + type=ParseIssue.Type.ERROR) + + self._errors.append(error) + + def __init__(self) -> None: + self._error_listener = SCsParser.SyntaxErrorListener() + self._impl = SCsParserImpl() + self._errors = [] + self._warnings = [] + + @property + def errors(self) -> List[ParseIssue]: + """Returns list of errors""" + return self._errors + + @property + def warnings(self) -> List[ParseIssue]: + """Returns list of warnings""" + return self._warnings + + @property + def triples(self) -> List[Triple]: + """Returns list of parsed triples in format (src, edge, trg)""" + return self._impl.triples + + @property + def type_triples(self) -> List[Tuple]: + """Returns list of parsed 
type triples in format (src, edge, trg). + + Source of each triple is a keynode that determines type of sc-element. + """ + return self._impl.triples + + def has_errors(self) -> bool: + return len(self.errors) > 0 + + def has_warnings(self) -> bool: + return len(self.warnings) > 0 + + def parse(self, data: str) -> bool: + """Runs SCs-text parsing from string `data`. + + If there were any errors during parser, then returns `False`. `errors` + list will contain all errors occurred during parsing. + """ + return self._parseImpl(antlr4.InputStream(data)) + + def parseFile(self, file_path: str) -> bool: + """Runs SCs-text parsing from file + + If there were any errors during parser, then returns `False`. `errors` + list will contain all errors occurred during parsing. + """ + return self._parseImpl(antlr4.FileStream(file_path, encoding='utf-8')) + + def _parseImpl(self, stream: antlr4.InputStream) -> bool: + + lexer = SCsLexerAntlr(input=stream) + lexer.addErrorListener(self._error_listener) + lexer.removeErrorListener(ConsoleErrorListener.INSTANCE) + + stream = antlr4.CommonTokenStream(lexer=lexer) + parser = SCsParserAntlr(input=stream) + + parser._impl = self._impl + parser._errHandler = BailErrorStrategy() + parser.addErrorListener(self._error_listener) + # do not print errors to console + parser.removeErrorListener(ConsoleErrorListener.INSTANCE) + + try: + parser.syntax() + except ParseCancellationException as ex: + if len(self._error_listener.errors) + len(self._impl.errors) == 0: + self._errors.append(ParseIssue( + 0, 0, '', "Wasn't able to parse input", ParseIssue.Type.ERROR)) + + self._errors.extend(self._error_listener.errors) + self._errors.extend(self._impl.errors) + + self._warnings.extend(self._impl.warnings) + + return len(self.errors) == 0 diff --git a/db/sc/scs/parser_impl.py b/db/sc/scs/parser_impl.py new file mode 100644 index 0000000..63169b7 --- /dev/null +++ b/db/sc/scs/parser_impl.py @@ -0,0 +1,156 @@ +from sc.core.keynodes import KeynodeNames + +from 
typing import Dict, List, Set, Tuple, Union + +from .parse_issue import ParseIssue +from .types import Alias, Arc, Edge, Element, Link, Node, TokenContext + +from antlr4.error.Errors import ParseCancellationException + + +Triple = Tuple[Element, Union[Arc, Edge], Element] + + +class SCsParserImpl: + + def __init__(self) -> None: + # list of parsed triples + self._triples = [] + + # Dictionary of element types. + # They should be in format: {element.name, set(str)} + self._types = {} + + # Counter for generated names + self._names_counter = 0 + + # Dictionary of alias definitions + self._aliases = {} + + # List of warnings + self._errors = [] + self._warnings = [] + + # Stack of contours + self._contour_stack = [] + + @property + def errors(self) -> List[ParseIssue]: + return self._errors + + @property + def warnings(self) -> List[ParseIssue]: + return self._warnings + + @property + def types(self) -> Dict[str, Set[str]]: + """Returns dictionary with element types""" + return self._types + + @property + def triples(self) -> List[Triple]: + """Returns list of triples""" + return self._triples + + def _process_name(self, name: str, prefix: str = "el", force_var: bool = False) -> str: + def next_id(): + self._names_counter += 1 + return self._names_counter + + if name is None or name == "..." 
or name == "{": + name = f"..{'_' if force_var else ''}{prefix}_generated_{next_id()}" + + return name + + def _is_type_keynode(self, name: str) -> bool: + """Checks if specified name if a type keynode""" + return name in self._type_keynodes + + def _break_parsing(self, msg: str): + """Raises error to break parsing""" + raise ParseCancellationException(msg) + + def create_alias(self, ctx: TokenContext) -> Alias: + return Alias(ctx.text, ctx, None) + + def create_node(self, ctx: TokenContext) -> Node: + return Node(self._process_name(ctx.text, prefix="node"), ctx) + + def create_link(self, ctx: TokenContext, value: Link.Value, type: Link.Type, is_var: bool) -> Link: + return Link(self._process_name(None, prefix="link", force_var=is_var), + self._link_remove_escape_symbols(value), + type, ctx) + + def create_edge(self, ctx: TokenContext, connector: str) -> Edge: + return Edge(connector, self._process_name(None, prefix="edge"), ctx) + + def create_arc(self, ctx: TokenContext, connector: str) -> Arc: + return Arc(connector, self._process_name(None, prefix="arc"), ctx) + + def append_triple(self, src: Element, edge: Union[Edge, Arc], trg: Element): + def _resolve_alias(alias: Alias) -> Element: + try: + return self._aliases[alias.name].target + except KeyError: + self._new_error(alias.ctx, + f"Alias {alias.name} is not defined") + self._break_parsing(f"Alias {alias.name} is not defined") + + if src.kind == Element.Kind.ALIAS: + src = _resolve_alias(src) + if trg.kind == Element.Kind.ALIAS: + trg = _resolve_alias(trg) + + triples = self._contour_stack[-1] if len(self._contour_stack) > 0 else self._triples + + if edge._reverse_if_back(): + triples.append((trg, edge, src)) + else: + triples.append((src, edge, trg)) + + def define_alias(self, alias: Alias, target: Element) -> Alias: + """Create new alias to specified element""" + assert alias.target is None + try: + prev_alias = self._aliases[alias.name] + self._new_warning(alias.ctx, + (f"Alias {alias.name} was previously 
defined at line: {prev_alias.ctx.line}" + f" column: {prev_alias.ctx.column}")) + except KeyError: + pass + + new_alias = Alias(alias.name, alias.ctx, target) + self._aliases[new_alias.name] = new_alias + return new_alias + + def _new_error(self, ctx: TokenContext, msg: str): + self._errors.append( + ParseIssue(ctx.line, ctx.column, ctx.text, msg, ParseIssue.Type.ERROR)) + + def _new_warning(self, ctx: TokenContext, msg: str): + self._warnings.append( + ParseIssue(ctx.line, ctx.column, ctx.text, msg, ParseIssue.Type.WARNING)) + + def _link_remove_escape_symbols(self, input: str) -> str: + return input.replace("\\[", "[").replace("\\]", "]").replace("\\\\", "\\") + + def start_contour(self): + self._contour_stack.append([]) + + def end_contour(self, contour: Node): + contour_triples = self._contour_stack.pop() + self._triples.extend(contour_triples) + + # append elements into contour + added = set() + + def add_element(child: Element): + if child.name not in added: + edge = self.create_arc(TokenContext(-1, -1, "->"), "->") + self._triples.append((contour, edge, child)) + added.add(child.name) + + for src, edge, trg in contour_triples: + add_element(src) + add_element(edge) + add_element(trg) diff --git a/db/sc/scs/scs.g4 b/db/sc/scs/scs.g4 new file mode 100644 index 0000000..bea5b94 --- /dev/null +++ b/db/sc/scs/scs.g4 @@ -0,0 +1,262 @@ +grammar scs; + +options { + language = Python3; +} + +@parser::header { +from sc.scs.types import * +from enum import Enum + +def create_token_context(ctx: any) -> TokenContext: + return TokenContext(line=ctx.line, column=ctx.column, text=ctx.text) + +class ConnectorType: + ARC = 0 + EDGE = 1 +} + +content + returns[Element el]: (v='_')? CONTENT_BODY { +token_context = create_token_context($CONTENT_BODY) +$ctx.el = self._impl.create_link(token_context, $CONTENT_BODY.text[1:-1], Link.Type.STRING, $v is not None) +}; + +contour returns[Element el] + @init {count = 1}: + CONTOUR_BEGIN {count > 0}? 
{self._impl.start_contour()} (sentence_wrap*) CONTOUR_END { +count -= 1 +if count == 0: + contour = self._impl.create_node(create_token_context($CONTOUR_BEGIN)) + self._impl.end_contour(contour) + $ctx.el = contour +}; + +connector_edge + returns[Element el]: + symbol = ( + '<>' + | '<=>' + | '_<>' + | '_<=>' + | '>' + | '<' + | '=>' + | '<=' + | '_=>' + | '_<=' + ) {$ctx.el = self._impl.create_edge(create_token_context($symbol), $symbol.text)}; + +connector_arc + returns[Element el]: + symbol = ( + '..>' + | '<..' + | '->' + | '<-' + | '-|>' + | '<|-' + | '-/>' + | '' + | '<~' + | '~|>' + | '<|~' + | '~/>' + | '' + | '_<..' + | '_->' + | '_<-' + | '_-|>' + | '_<|-' + | '_-/>' + | '_' + | '_<~' + | '_~|>' + | '_<|~' + | '_~/>' + | '_') +self._impl.append_triple($set, edge, $idtf.el) +if $ctx.attr is not None: + for a, e in $attr.items: + self._impl.append_triple(a, e, edge) +}; + +idtf_set_item_list[Element set]: + idtf_set_item[set] (';' idtf_set_item[set])*; + +idtf_set + returns[Element el]: + t = '{' { +context = create_token_context($t) +$ctx.el = self._impl.create_node(context) + } idtf_set_item_list[$ctx.el] '}'; + +idtf_atomic + returns[Element el]: + ifdf_alias {$ctx.el = $ifdf_alias.el} + | idtf_system {$ctx.el = $idtf_system.el}; + +idtf_url + returns[Element el]: + LINK { +context = create_token_context($LINK) +$ctx.el = self._impl.create_link(context, $LINK.text[1:-1], Link.Type.URL, False) +}; + +idtf_common + returns[Element el]: + idtf_atomic {$ctx.el = $idtf_atomic.el} + | idtf_edge {$ctx.el = $idtf_edge.el} + | idtf_set {$ctx.el = $idtf_set.el} + | contour {$ctx.el = $contour.el} + | content {$ctx.el = $content.el} + | idtf_url {$ctx.el = $idtf_url.el}; + +idtf_list + returns[items] + @init {$ctx.items = []}: + first = idtf_common {$ctx.items.append($first.el)} internal_sentence_list[$first.el]? ( + ';' second = idtf_common {$ctx.items.append($second.el)} internal_sentence_list[$first.el]? 
+ )*; + +internal_sentence[Element src]: + c = connector attr = attr_list? target = idtf_list { + +for t in $target.items: + edge = None + if isinstance($c.el, Edge): + edge = self._impl.create_edge($c.el.ctx, $c.el.connector) + else: + edge = self._impl.create_arc($c.el.ctx, $c.el.connector) + self._impl.append_triple($src, edge, t) + if $ctx.attr is not None: + for a, e in $attr.items: + self._impl.append_triple(a, e, edge) + +}; + +internal_sentence_list[Element src]: + '(*' (internal_sentence[src] SENTENCE_SEP)+ '*)'; + +sentence_lvl_4_list_item[Element src]: + (c = connector attr = attr_list? target = idtf_list) { +for t in $target.items: + edge = None + if isinstance($c.el, Edge): + edge = self._impl.create_edge($c.el.ctx, $c.el.connector) + else: + edge = self._impl.create_arc($c.el.ctx, $c.el.connector) + + self._impl.append_triple($src, edge, t) + if $ctx.attr is not None: + for a, e in $attr.items: + self._impl.append_triple(a, e, edge) +}; + +sentence_lvl_common: + idtf_common sentence_lvl_4_list_item[$idtf_common.el] ( + ';' sentence_lvl_4_list_item[$idtf_common.el] + )*; + +attr_list + returns[items] + @init {$ctx.items = []}: ( + ID_SYSTEM EDGE_ATTR { +node = self._impl.create_node(create_token_context($ID_SYSTEM)) +edge = None +connector = "->" if $EDGE_ATTR.text == ":" else "_->" +edge = self._impl.create_arc(create_token_context($EDGE_ATTR), connector) + +$ctx.items.append((node, edge)) +} + )+; + +// ---------------------------- + +ID_SYSTEM: ('a' ..'z' | 'A' ..'Z' | '_' | '.' 
| '0' ..'9')+; + +ALIAS_SYMBOLS: + '@' ('a' ..'z' | 'A' ..'Z' | '_' | '0' ..'9')+; + +fragment CONTENT_ESCAPED: '\\' ('[' | ']' | '\\'); + +fragment CONTENT_SYBMOL: (CONTENT_ESCAPED | ~('[' | ']' | '\\')); + +fragment CONTENT_SYBMOL_FIRST_END: ( + CONTENT_ESCAPED + | ~('[' | ']' | '\\' | '*') + ); + +CONTOUR_BEGIN: '[*'; + +CONTOUR_END: '*]'; + +CONTENT_BODY: + '[]' + | '[' CONTENT_SYBMOL_FIRST_END CONTENT_SYBMOL* ']'; + +LINK: '"' (~('"') | '\\"')* '"'; + +EDGE_ATTR: ':' | '::'; + +LINE_TERMINATOR: [\r\n\u2028\u2029] -> channel(HIDDEN); + +LINE_COMMENT: + '//' ~('\n' | '\r')* '\r'? '\n' -> channel(HIDDEN); + +MULTINE_COMMENT: '/*' .*? '*/' -> channel(HIDDEN); + +WS: ( ' ' | '\t' | '\r' | '\n') -> channel(HIDDEN); + +SENTENCE_SEP: ';;'; diff --git a/db/sc/scs/types.py b/db/sc/scs/types.py new file mode 100644 index 0000000..2cb390d --- /dev/null +++ b/db/sc/scs/types.py @@ -0,0 +1,156 @@ +from enum import Enum +from sys import is_finalizing +from typing import Union + +import re + + +class TokenContext: + + def __init__(self, line: int, column: int, text: str) -> None: + self.line = line + self.column = column + self.text = text + + def __repr__(self) -> str: + return f"{{ line: {self.line}, column: {self.column}, text: `{self.text}`}}" + + def clone(self): + return TokenContext(self.line, self.column, self.text) + + +class Element: + + class Kind(Enum): + ALIAS = 0 + NODE = 1 + EDGE = 2 + ARC = 3 + LINK = 4 + + def __init__(self, kind: Kind, name: str, ctx: TokenContext) -> None: + self.kind = kind + self.name = name + self.ctx = ctx + + def is_const(self) -> bool: + return not self.is_var() + + def is_var(self) -> bool: + return re.fullmatch(re.compile("^\.{0,2}_.*$"), self.name) is not None + + def __repr__(self) -> str: + return self._to_str() + + def _to_str(self, **kwargs): + attrs = ', '.join([f"{k}: {v}" for k, v in kwargs.items()]) + return( + f"{self.__class__.__name__}(ctx: {self.ctx}, " + f"kind: {self.kind.name}, " + f"name: {self.name}" + f"{', ' + 
attrs if len(attrs) > 0 else ''})") + + +class Alias(Element): + + def __init__(self, name: str, ctx: TokenContext, target: Element) -> None: + super().__init__(Element.Kind.ALIAS, name, ctx) + + self.target = target + + def __repr__(self) -> str: + return self._to_str(target=self.target) + + +class Node(Element): + + def __init__(self, name: str, ctx: TokenContext) -> None: + super().__init__(Element.Kind.NODE, name, ctx) + + +class Edge(Element): + + VALID = {'<>', '<=>', '_<>', '_<=>', '>', + '<', '=>', '<=', '_=>', '_<='} + BACKWARD = {'<', '<=', '_<='} + REVERSE_DICT = { + '<': '>', + '<=': '=>', + '_<=': '_=>' + } + + def __init__(self, connector: str, name: str, ctx: TokenContext) -> None: + super().__init__(Element.Kind.EDGE, name, ctx) + + self.connector = connector + if self.connector not in Edge.VALID: + raise KeyError( + f"Connector `{connector}` is not supported. List of supported connectors: {Edge.VALID}") + + def _reverse_if_back(self) -> bool: + """Reverse connector to forward direction. + + If connector was reversed, then returns True. + """ + if self.connector in Edge.BACKWARD: + self.connector = Edge.REVERSE_DICT[self.connector] + return True + + return False + + def __repr__(self) -> str: + return self._to_str(connector=self.connector) + + +class Arc(Element): + + VALID = {'..>', '<..', '->', '<-', '-|>', '<|-', '-/>', '', '<~', '~|>', '<|~', '~/>', '', '_<..', '_->', '_<-', '_-|>', '_<|-', '_-/>', '_', '_<~', '_~|>', '_<|~', '_~/>', '_', + '<-': '->', '<|-': '-|>', '', '<~': '~>', '<|~': "~|>", '', + '_<..': '_..>', '_<-': '_->', '_<|-': '_-|>', '_', '_<~': '_~>', '_<|~': '_~|>', '_' + } + + def __init__(self, connector: str, name: str, ctx: TokenContext) -> None: + super().__init__(Element.Kind.ARC, name, ctx) + + self.connector = connector + + if self.connector not in Arc.VALID: + raise KeyError( + f"Connector `{connector}` is not supported. 
List of supported connectors: {Arc.VALID}") + + def _reverse_if_back(self) -> bool: + """Reverse connector to forward direction. + + If connector was reversed, then returns True. + """ + if self.connector in Arc.BACKWARD: + self.connector = Arc.REVERSE_DICT[self.connector] + return True + + return False + + def __repr__(self) -> str: + return self._to_str(connector=self.connector) + + +class Link(Element): + + Value = Union[str, int, float] + + class Type(Enum): + STRING = 0 + URL = 1 + + def __init__(self, name: str, value: Value, type_: Type, ctx: TokenContext) -> None: + super().__init__(Element.Kind.LINK, name, ctx) + + self.value = value + self.type = type_ + + def __repr__(self) -> str: + return self._to_str(value=self.value, type=self.type.name) diff --git a/db/scripts/generate_antlr.sh b/db/scripts/generate_antlr.sh new file mode 100755 index 0000000..f9a0748 --- /dev/null +++ b/db/scripts/generate_antlr.sh @@ -0,0 +1,6 @@ +if [ -z "$1" ] +then + echo "Please specify path to antlr tool. 
For example:\033[1;34m generate_antlr.sh antlr-4.9.2-complete.jar\033[0m" +else + java -jar $1 -Dlanguage=Python3 -o sc/scs/antlr -no-listener -no-visitor -Xexact-output-dir sc/scs/scs.g4 +fi \ No newline at end of file diff --git a/db/tests/units/scs/__init__.py b/db/tests/units/scs/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/db/tests/units/scs/test_scs_alias.py b/db/tests/units/scs/test_scs_alias.py new file mode 100644 index 0000000..13f2738 --- /dev/null +++ b/db/tests/units/scs/test_scs_alias.py @@ -0,0 +1,81 @@ +import unittest + +from sc.scs.parser import SCsParser +from sc.scs.types import * + + +class TestSCsAlias(unittest.TestCase): + + def test_edge_smoke(self): + parser = SCsParser() + + self.assertTrue(parser.parse("@alias = (x -> y);;")) + self.assertEqual(len(parser.triples), 1) + + def test_contour_smoke(self): + parser = SCsParser() + + self.assertTrue(parser.parse( + ("@alias = _[];;" + "x -> [* @alias2 = y;;" + " @alias _~> @alias2;;" + "*];;"))) + + self.assertEqual(len(parser.triples), 5) + + def test_usage(self): + parser = SCsParser() + + self.assertTrue(parser.parse("@alias = [];; x ~> @alias;;")) + self.assertEqual(len(parser.triples), 1) + self.assertFalse(parser.has_warnings()) + + src, _, trg = parser.triples[0] + + self.assertIsInstance(src, Node) + self.assertIsInstance(trg, Link) + + def test_not_defined(self): + parser = SCsParser() + + self.assertFalse(parser.parse("x -> @alias;;")) + self.assertEqual(len(parser.errors), 1) + self.assertFalse(parser.has_warnings()) + + def test_reassign_warning(self): + parser = SCsParser() + + self.assertTrue(parser.parse( + ("@alias = (x -> y);;" + "@alias = (z -> y);;"))) + self.assertEqual(len(parser.triples), 2) + self.assertEqual(len(parser.warnings), 1) + + def test_reassign(self): + parser = SCsParser() + + self.assertTrue(parser.parse( + ("@alias = x;;" + "y -> @alias;;" + "@alias = z;;" + "@alias -> c;;"))) + + self.assertEqual(len(parser.triples), 2) + + src, _, _ 
= parser.triples[1] + self.assertEqual(src.name, "z") + + def test_assign_alias_to_alias(self): + parser = SCsParser() + + self.assertTrue(parser.parse( + ("@alias1 = x;;" + "@alias1 <- sc_node_tuple;;" + "@alias2 = @alias1;;" + "_y -|> @alias2;;"))) + + self.assertEqual(len(parser.triples), 2) + self.assertFalse(parser.has_warnings()) + + _, _, trg = parser.triples[0] + self.assertEqual(trg.name, "x") diff --git a/db/tests/units/scs/test_scs_level2.py b/db/tests/units/scs/test_scs_level2.py new file mode 100644 index 0000000..68e0a34 --- /dev/null +++ b/db/tests/units/scs/test_scs_level2.py @@ -0,0 +1,85 @@ +import unittest + +from sc.scs.types import * +from sc.scs.parser import SCsParser + + +class TestSCsLevel2(unittest.TestCase): + + def test_reverse(self): + parser = SCsParser() + + self.assertTrue(parser.parse("b <- a;;")) + self.assertEqual(len(parser.triples), 1) + + src, edge, trg = parser.triples[0] + self.assertEqual(src.name, "a") + self.assertEqual(edge.connector, "->") + self.assertEqual(trg.name, "b") + + def test_smoke(self): + parser = SCsParser() + + self.assertTrue(parser.parse("a -> (b <- c);;")) + self.assertEqual(len(parser.triples), 2) + + src, edge, trg = parser.triples[0] + self.assertEqual(src.name, "c") + self.assertEqual(edge.connector, "->") + self.assertEqual(trg.name, "b") + + src, edge, trg = parser.triples[1] + self.assertEqual(src.name, "a") + self.assertEqual(edge.connector, "->") + self.assertIsInstance(trg, Arc) + + def test_smoke_2(self): + parser = SCsParser() + + self.assertTrue(parser.parse("(c <- x) <- (b <> y);;")) + self.assertEqual(len(parser.triples), 3) + + src, edge, trg = parser.triples[0] + self.assertIsInstance(src, Node) + self.assertEqual(src.name, "x") + self.assertIsInstance(edge, Arc) + self.assertEqual(edge.connector, "->") + self.assertIsInstance(trg, Node) + self.assertEqual(trg.name, "c") + + src, edge, trg = parser.triples[1] + self.assertIsInstance(src, Node) + self.assertEqual(src.name, "b") + 
self.assertIsInstance(edge, Edge) + self.assertEqual(edge.connector, "<>") + self.assertIsInstance(trg, Node) + self.assertEqual(trg.name, "y") + + src, edge, trg = parser.triples[2] + self.assertIsInstance(src, Edge) + self.assertIsInstance(edge, Arc) + self.assertEqual(edge.connector, "->") + self.assertIsInstance(trg, Arc) + + def test_unnamed(self): + parser = SCsParser() + + self.assertTrue(parser.parse("a -> ...; -> ...;;")) + self.assertEqual(len(parser.triples), 2) + + _, _, trg = parser.triples[0] + self.assertIsInstance(trg, Node) + + _, _, trg = parser.triples[1] + self.assertIsInstance(trg, Node) + + def test_invalid(self): + tests = ["a -> (x -> (y -> z));;", + "a -> (x -> [content]);;", + "a -> (x -> [* y -> z ;; *]);;", + "a -> (x -> { y; z });;"] + + for t in tests: + parser = SCsParser() + self.assertFalse(parser.parse(t)) + self.assertGreater(len(parser.errors), 0) diff --git a/db/tests/units/scs/test_scs_level3.py b/db/tests/units/scs/test_scs_level3.py new file mode 100644 index 0000000..e2496f9 --- /dev/null +++ b/db/tests/units/scs/test_scs_level3.py @@ -0,0 +1,61 @@ +import unittest + +from sc.scs.types import * +from sc.scs.parser import SCsParser + + +class TestSCsLevel3(unittest.TestCase): + + def test_smoke(self): + parser = SCsParser() + + self.assertTrue(parser.parse("a -> c: _b:: d;;")) + self.assertEqual(len(parser.triples), 3) + + src, edge, trg = parser.triples[0] + self.assertEqual(src.name, "a") + self.assertEqual(edge.connector, "->") + self.assertEqual(trg.name, "d") + + src, edge, trg = parser.triples[1] + self.assertEqual(src.name, "c") + self.assertEqual(edge.connector, "->") + self.assertIsInstance(trg, Arc) + + src, edge, trg = parser.triples[2] + self.assertEqual(src.name, "_b") + self.assertEqual(edge.connector, "_->") + self.assertIsInstance(trg, Arc) + + def test_complex(self): + parser = SCsParser() + + self.assertTrue(parser.parse("(a _<- f: d) -/> (c ~> b:: d);;")) + self.assertEqual(len(parser.triples), 5) + + src, 
edge, trg = parser.triples[0] + self.assertEqual(src.name, "d") + self.assertEqual(edge.connector, "_->") + self.assertEqual(trg.name, "a") + + src, edge, trg = parser.triples[1] + self.assertEqual(src.name, "f") + self.assertEqual(edge.connector, "->") + self.assertIsInstance(trg, Arc) + self.assertEqual(trg.connector, "_->") + + src, edge, trg = parser.triples[2] + self.assertEqual(src.name, "c") + self.assertEqual(edge.connector, "~>") + self.assertEqual(trg.name, "d") + + src, edge, trg = parser.triples[3] + self.assertEqual(src.name, "b") + self.assertEqual(edge.connector, "_->") + self.assertIsInstance(trg, Arc) + self.assertEqual(trg.connector, "~>") + + src, edge, trg = parser.triples[4] + self.assertIsInstance(src, Arc) + self.assertEqual(edge.connector, "-/>") + self.assertIsInstance(trg, Arc) diff --git a/db/tests/units/scs/test_scs_level4.py b/db/tests/units/scs/test_scs_level4.py new file mode 100644 index 0000000..4f4d545 --- /dev/null +++ b/db/tests/units/scs/test_scs_level4.py @@ -0,0 +1,55 @@ +import unittest + +from sc.scs.parser import SCsParser +from sc.scs.types import * + + +class TestSCsLevel4(unittest.TestCase): + + def test_smoke(self): + parser = SCsParser() + + self.assertTrue(parser.parse("a -> b: c; d;;")) + self.assertEqual(len(parser.triples), 4) + + src, _, trg = parser.triples[0] + self.assertEqual(src.name, "a") + self.assertEqual(trg.name, "c") + + src, _, trg = parser.triples[1] + self.assertEqual(src.name, "b") + self.assertIsInstance(trg, Arc) + + src, _, trg = parser.triples[2] + self.assertEqual(src.name, "a") + self.assertEqual(trg.name, "d") + + src, _, trg = parser.triples[3] + self.assertEqual(src.name, "b") + self.assertIsInstance(trg, Arc) + + def test_complex(self): + parser = SCsParser() + + self.assertTrue(parser.parse("a -> b: c; <- d: e: f;;")) + self.assertEqual(len(parser.triples), 5) + + src, _, trg = parser.triples[0] + self.assertEqual(src.name, "a") + self.assertEqual(trg.name, "c") + + src, _, trg = 
parser.triples[1] + self.assertEqual(src.name, "b") + self.assertIsInstance(trg, Arc) + + src, _, trg = parser.triples[2] + self.assertEqual(src.name, "f") + self.assertEqual(trg.name, "a") + + src, _, trg = parser.triples[3] + self.assertEqual(src.name, "d") + self.assertIsInstance(trg, Arc) + + src, _, trg = parser.triples[4] + self.assertEqual(src.name, "e") + self.assertIsInstance(trg, Arc) diff --git a/db/tests/units/scs/test_scs_level5.py b/db/tests/units/scs/test_scs_level5.py new file mode 100644 index 0000000..d5a5e1c --- /dev/null +++ b/db/tests/units/scs/test_scs_level5.py @@ -0,0 +1,37 @@ +import unittest + +from sc.scs.parser import SCsParser +from sc.scs.types import * + + +class TestSCsLevel5(unittest.TestCase): + + def test_smoke(self): + parser = SCsParser() + + self.assertTrue(parser.parse(( + "set ~> attr:: item" + " (* -/> subitem;;" + " <= subitem2;; *);;"))) + + self.assertEqual(len(parser.triples), 4) + + src, edge, trg = parser.triples[0] + self.assertEqual(src.name, "item") + self.assertEqual(edge.connector, "-/>") + self.assertEqual(trg.name, "subitem") + + src, edge, trg = parser.triples[1] + self.assertEqual(src.name, "subitem2") + self.assertEqual(edge.connector, "=>") + self.assertEqual(trg.name, "item") + + src, edge, trg = parser.triples[2] + self.assertEqual(src.name, "set") + self.assertIsInstance(edge, Arc) + self.assertEqual(trg.name, "item") + + src, edge, trg = parser.triples[3] + self.assertEqual(src.name, "attr") + self.assertEqual(edge.connector, "_->") + self.assertIsInstance(trg, Arc) diff --git a/db/tests/units/scs/test_scs_level6.py b/db/tests/units/scs/test_scs_level6.py new file mode 100644 index 0000000..6da92af --- /dev/null +++ b/db/tests/units/scs/test_scs_level6.py @@ -0,0 +1,235 @@ +from ast import parse +import unittest + +from sc.scs.parser import SCsParser +from sc.scs.types import * + + +class TestSCsLevel6(unittest.TestCase): + + def test_smoke(self): + tests = ["z -> [**];;", + "x -> [test*];;", + "@a = 
[\\[* r-> b;; *\\]];;", + "@alias = u;; @alias -> [* x -> [* y -> z;; *];; *];;", + "y <= nrel_main_idtf: [y*];;", + "a -> [* z -> [begin*];; *];;", + "a -> [* b -> c;; *];;"] + for t in tests: + parser = SCsParser() + self.assertTrue(parser.parse(t)) + + def test_set(self): + parser = SCsParser() + + self.assertTrue(parser.parse("@set = { a; b: c; d: e: f };;")) + self.assertEqual(len(parser.triples), 6) + + common_src = parser.triples[0][0] + + src, edge, trg = parser.triples[0] + self.assertIsInstance(src, Node) + self.assertEqual(edge.connector, "->") + self.assertEqual(trg.name, "a") + + src, edge, trg = parser.triples[1] + self.assertIsInstance(src, Node) + self.assertEqual(src.name, common_src.name) + self.assertEqual(edge.connector, "->") + self.assertEqual(trg.name, "c") + + src, edge, trg = parser.triples[2] + self.assertEqual(src.name, "b") + self.assertEqual(edge.connector, "->") + self.assertIsInstance(trg, Arc) + + src, edge, trg = parser.triples[3] + self.assertIsInstance(src, Node) + self.assertEqual(src.name, common_src.name) + self.assertEqual(edge.connector, "->") + self.assertEqual(trg.name, "f") + + src, edge, trg = parser.triples[4] + self.assertEqual(src.name, "d") + self.assertEqual(edge.connector, "->") + self.assertIsInstance(trg, Arc) + + src, edge, trg = parser.triples[5] + self.assertEqual(src.name, "e") + self.assertEqual(edge.connector, "->") + self.assertIsInstance(trg, Arc) + + def test_set_recursive(self): + parser = SCsParser() + + self.assertTrue(parser.parse("@set = { a; {b} };;")) + self.assertEqual(len(parser.triples), 3) + + src, edge, trg = parser.triples[0] + self.assertIsInstance(src, Node) + self.assertEqual(edge.connector, "->") + self.assertIsInstance(trg, Node) + + src, edge, trg = parser.triples[1] + self.assertIsInstance(src, Node) + self.assertEqual(edge.connector, "->") + self.assertEqual(trg.name, "b") + + prev_src = src + + src, edge, trg = parser.triples[2] + self.assertEqual(prev_src.name, trg.name) + + 
self.assertIsInstance(src, Node) + self.assertEqual(edge.connector, "->") + self.assertIsInstance(trg, Node) + + def test_content_empty(self): + parser = SCsParser() + + self.assertTrue(parser.parse("a -> [];;")) + self.assertEqual(len(parser.triples), 1) + + src, edge, trg = parser.triples[0] + self.assertEqual(src.name, "a") + self.assertEqual(edge.connector, "->") + self.assertIsInstance(trg, Link) + self.assertEqual(trg.value, "") + self.assertTrue(trg.is_const()) + + def test_content_simple(self): + parser = SCsParser() + + self.assertTrue(parser.parse("a -> [simple];;")) + self.assertEqual(len(parser.triples), 1) + + src, edge, trg = parser.triples[0] + self.assertEqual(src.name, "a") + self.assertEqual(edge.connector, "->") + self.assertIsInstance(trg, Link) + self.assertEqual(trg.value, "simple") + self.assertTrue(trg.is_const()) + + def test_content_var(self): + parser = SCsParser() + + self.assertTrue(parser.parse("a -> _[simple];;")) + self.assertEqual(len(parser.triples), 1) + + src, edge, trg = parser.triples[0] + self.assertEqual(src.name, "a") + self.assertEqual(edge.connector, "->") + self.assertIsInstance(trg, Link) + self.assertEqual(trg.value, "simple") + self.assertTrue(trg.is_var()) + + def test_content_multiline(self): + parser = SCsParser() + + self.assertTrue(parser.parse(( + "a -> [simple\n" + " multiline];;"))) + self.assertEqual(len(parser.triples), 1) + + src, edge, trg = parser.triples[0] + self.assertEqual(src.name, "a") + self.assertEqual(edge.connector, "->") + self.assertIsInstance(trg, Link) + self.assertEqual(len(trg.value.split('\n')), 2) + self.assertTrue(trg.is_const()) + + def test_content_escaping(self): + tests = [ + ("x -> _[\\[test\\]];;", "[test]"), + ("x -> _[\\\\\\[test\\\\\\]];;", "\\[test\\]") + ] + + for input, output in tests: + parser = SCsParser() + self.assertTrue(parser.parse(input)) + self.assertEqual(len(parser.triples), 1) + + _, _, link = parser.triples[0] + self.assertIsInstance(link, Link) + 
self.assertEqual(link.value, output) + + def test_content_error(self): + tests = [ + "@alias = [;;", + "x -> y (* -> name: [name ;;", + ] + + for t in tests: + parser = SCsParser() + self.assertFalse(parser.parse(t)) + self.assertGreater(len(parser.errors), 0) + + def test_contour_empty(self): + parser = SCsParser() + + self.assertTrue(parser.parse("x -> [**];;")) + self.assertEqual(len(parser.triples), 1) + + src, edge, trg = parser.triples[0] + self.assertEqual(src.name, "x") + self.assertEqual(edge.connector, "->") + self.assertIsInstance(trg, Node) + + def test_contour_simple(self): + parser = SCsParser() + + self.assertTrue(parser.parse("x -|> [* y _=> z;; *];;")) + self.assertEqual(len(parser.triples), 5) + + src, edge, trg = parser.triples[0] + self.assertEqual(src.name, "y") + self.assertEqual(edge.connector, "_=>") + self.assertEqual(trg.name, "z") + + _, edge, trg = parser.triples[1] + self.assertEqual(edge.connector, "->") + self.assertEqual(trg.name, "y") + + _, edge, trg = parser.triples[2] + self.assertEqual(edge.connector, "->") + self.assertIsInstance(trg, Edge) + self.assertEqual(trg.connector, "_=>") + + _, edge, trg = parser.triples[3] + self.assertEqual(edge.connector, "->") + self.assertEqual(trg.name, "z") + + src, edge, trg = parser.triples[4] + self.assertEqual(src.name, "x") + self.assertEqual(edge.connector, "-|>") + self.assertIsInstance(trg, Node) + + + def test_countour_recursive(self): + parser = SCsParser() + + self.assertTrue(parser.parse("x ~|> [* y _=> [* k ~> z;; *];; *];;")) + self.assertEqual(len(parser.triples), 9) + + def test_countour_with_content(self): + parser = SCsParser() + + self.assertTrue(parser.parse("x -> [* y _=> [test*];; *];;")) + self.assertEqual(len(parser.triples), 5) + + _, edge, trg = parser.triples[0] + self.assertIsInstance(trg, Link) + self.assertEqual(trg.value, "test*") + self.assertEqual(edge.connector, "_=>") + + def test_countour_error(self): + tests = [ + "x -> [* y -> z *];;", + "y -> [* z -> [* 
*];;", + "x -> [* y -> z;; ];;", + ] + + for t in tests: + parser = SCsParser() + self.assertFalse(parser.parse(t)) + self.assertEqual(len(parser.errors), 1) diff --git a/db/tests/units/scs/test_scs_link.py b/db/tests/units/scs/test_scs_link.py new file mode 100644 index 0000000..1b054b6 --- /dev/null +++ b/db/tests/units/scs/test_scs_link.py @@ -0,0 +1,29 @@ +import unittest + +from sc.scs.parser import SCsParser +from sc.scs.types import Arc, Edge, Element, Link, Node + + +class TestSCsLink(unittest.TestCase): + + def test_link_smoke(self): + parser = SCsParser() + + self.assertTrue(parser.parse("a -> [value];;")) + self.assertEqual(len(parser.triples), 1) + + _, _, link = parser.triples[0] + self.assertIsInstance(link, Link) + self.assertEqual(link.type, Link.Type.STRING) + self.assertEqual(link.value, "value") + + def test_url_smoke(self): + parser = SCsParser() + + self.assertTrue(parser.parse("a -> \"url://value\";;")) + self.assertEqual(len(parser.triples), 1) + + _, _, link = parser.triples[0] + self.assertIsInstance(link, Link) + self.assertEqual(link.type, Link.Type.URL) + self.assertEqual(link.value, "url://value") diff --git a/db/tests/units/scs/test_scs_parser.py b/db/tests/units/scs/test_scs_parser.py new file mode 100644 index 0000000..ce2356b --- /dev/null +++ b/db/tests/units/scs/test_scs_parser.py @@ -0,0 +1,43 @@ +import unittest + +from sc import SCsParser + + +class TestSCsParser(unittest.TestCase): + + def test_smoke(self): + parser = SCsParser() + self.assertTrue(parser.parse("x -> y;;")) + + def test_smoke_error(self): + parser = SCsParser() + self.assertFalse(parser.parse("x -> y;")) + self.assertEqual(len(parser.errors), 1) + + def test_comments(self): + parser = SCsParser() + self.assertTrue(parser.parse(( + "//Level1\n" + "a -> b;;/* example */\n" + "c <> d;;" + ))) + self.assertEqual(len(parser.triples), 2) + + def test_links(self): + parser = SCsParser() + self.assertTrue(parser.parse(( + "a -> \"file://data.txt\";;" + "b -> [x];;" + 
"c -> _[];;" + "d -> [];;" + ))) + self.assertEqual(len(parser.triples), 4) + + def test_type_no_error(self): + parser = SCsParser() + self.assertTrue(parser.parse(( + "a <- sc_node_abstract;;" + "a <- sc_node_role_relation;;" + ))) + self.assertFalse(parser.has_errors()) + self.assertEqual(len(parser.triples), 2) diff --git a/db/tests/units/scs/test_scs_types.py b/db/tests/units/scs/test_scs_types.py new file mode 100644 index 0000000..25dda5d --- /dev/null +++ b/db/tests/units/scs/test_scs_types.py @@ -0,0 +1,44 @@ +import unittest + +from sc.scs.types import * + + +class TestSCsElement(unittest.TestCase): + + def test_element_idtf(self): + el = Element(Element.Kind.NODE, name="_idtf", ctx=None) + self.assertTrue(el.is_var()) + + el = Element(Element.Kind.NODE, name=".._idtf", ctx=None) + self.assertTrue(el.is_var()) + + el = Element(Element.Kind.NODE, name="._idtf", ctx=None) + self.assertTrue(el.is_var()) + + el = Element(Element.Kind.NODE, name="_idtf", ctx=None) + self.assertTrue(el.is_var()) + + el = Element(Element.Kind.NODE, name="...", ctx=None) + self.assertTrue(el.is_const()) + + el = Element(Element.Kind.NODE, name="..idtf", ctx=None) + self.assertTrue(el.is_const()) + + el = Element(Element.Kind.NODE, name=".idtf", ctx=None) + self.assertTrue(el.is_const()) + + def test_arc_reverse(self): + arc = Arc('->', '', TokenContext(0, 0, '')) + self.assertFalse(arc._reverse_if_back()) + + arc.connector = '<-' + self.assertTrue(arc._reverse_if_back()) + self.assertEqual(arc.connector, '->') + + def test_edge_reverse(self): + edge = Edge('_=>', '', TokenContext(0, 0, '')) + self.assertFalse(edge._reverse_if_back()) + + edge.connector = '_<=' + self.assertTrue(edge._reverse_if_back()) + self.assertEqual(edge.connector, '_=>') diff --git a/db/tools/bootstrap/step.py b/db/tools/bootstrap/step.py index dfc5189..4398b54 100644 --- a/db/tools/bootstrap/step.py +++ b/db/tools/bootstrap/step.py @@ -2,7 +2,7 @@ import neo4j from sc.core.keywords import Labels, TypeAttrs 
-from sc.core.keynodes import Keynodes +from sc.core.keynodes import KeynodeNames from sc.core.config import Config from typing import Union @@ -31,7 +31,7 @@ def wrap_enum(e) -> object: return result return templ.render( - Keynodes=Keynodes, + KeynodeNames=KeynodeNames, Labels=Labels, TypeConst=wrap_enum(TypeConst), TypeNodeStruct=wrap_enum(TypeNodeStruct), diff --git a/db/tools/bootstrap/steps/0001-sys_idtf.cypher.jinja b/db/tools/bootstrap/steps/0001-sys_idtf.cypher.jinja index b7a7939..cff7aca 100644 --- a/db/tools/bootstrap/steps/0001-sys_idtf.cypher.jinja +++ b/db/tools/bootstrap/steps/0001-sys_idtf.cypher.jinja @@ -1,7 +1,7 @@ {% import 'lib/types.cypher.jinja' as types with context %} CREATE - (link:{{Labels.SC_LINK}} {content: "{{Keynodes.NREL_SYS_IDTF}}", is_url: false, type: "str", {{ types.const_attr(TypeConst.CONST) }} }), + (link:{{Labels.SC_LINK}} {content: "{{KeynodeNames.NREL_SYS_IDTF}}", is_url: false, type: "str", {{ types.const_attr(TypeConst.CONST) }} }), (node:{{Labels.SC_NODE}} { {{ types.node_const_no_role_attrs() }} }), (node)-[edge:{{Labels.SC_ARC}} { {{ types.const_attr(TypeConst.CONST) }} }]->(link) WITH edge, node diff --git a/db/tools/bootstrap/steps/0010-types.cypher.jinja b/db/tools/bootstrap/steps/0010-types.cypher.jinja index 8f99858..ecc79da 100644 --- a/db/tools/bootstrap/steps/0010-types.cypher.jinja +++ b/db/tools/bootstrap/steps/0010-types.cypher.jinja @@ -1,13 +1,11 @@ {% import 'lib/names.cypher.jinja' as names with context %} {% import 'lib/types.cypher.jinja' as types with context %} -{% set keynodes_list = [ - 'sc_node', 'sc_link', 'sc_edge', 'sc_arc', 'sc_arc_member', - 'sc_const', 'sc_var', - 'sc_node_struct', 'sc_node_tuple', 'sc_node_role', 'sc_node_no_role', 'sc_node_class', 'sc_node_abstract', 'sc_node_material', - 'sc_edge_perm', 'sc_edge_temp', - 'sc_edge_pos', 'sc_edge_neg', 'sc_edge_fuz' - ]%} +{% set keynodes_list = KeynodeNames.CORE_TYPES + + KeynodeNames.CONST_TYPES + + KeynodeNames.NODE_TYPES + + 
KeynodeNames.ARC_PERM_TYPES + + KeynodeNames.ARC_POS_TYPES %} {# find system identifier keynode #} {{ names.find_sys_idtf_rel(found_name="found_nrel_sys_idtf") }} diff --git a/db/tools/bootstrap/steps/lib/names.cypher.jinja b/db/tools/bootstrap/steps/lib/names.cypher.jinja index 4af37a0..75f1b64 100644 --- a/db/tools/bootstrap/steps/lib/names.cypher.jinja +++ b/db/tools/bootstrap/steps/lib/names.cypher.jinja @@ -9,7 +9,7 @@ MATCH -[:{{Labels.SC_ARC}} { {{ types.arc_member_const_pos_perm() }} }]-> (edge_sock_{{found_name}}) WHERE - (link_{{found_name}}.content = '{{Keynodes.NREL_SYS_IDTF}}') AND + (link_{{found_name}}.content = '{{KeynodeNames.NREL_SYS_IDTF}}') AND edge_sock_{{found_name}}.edge_id = id(edge_{{found_name}}) {%- endmacro %} diff --git a/docs/high_level_arch.drawio b/docs/high_level_arch.drawio new file mode 100644 index 0000000..0b842d2 --- /dev/null +++ b/docs/high_level_arch.drawio @@ -0,0 +1 @@ +5Vtbc6M2FP41eTQjJAT40bm47TTb7kza2fYpI0DGbAAxIMf2/voecXEgKLaTgp1k8dgDRxKg79w+XXxBrpLNLznLll9EwOMLjILNBbm+wHhq2fCrBNtKYDu4EoR5FFQi80lwF/3gtRDV0lUU8KJTUQoRyyjrCn2RptyXHRnLc7HuVluIuPvUjIW8J7jzWdyXfosCuaylDkJPBb/yKFw2jzabkoQ1tWtBsWSBWLdE5OaCXOVCyOos2VzxWIHXAFO1m79QunuznKfymAb307/W4cKMb+fL6OZ6ubn3wt8mplPd5pHFq7rL9dvKbYNBLlZpwNVd0AW5XC8jye8y5qvSNSgdZEuZxHBlwukiiuMrEYu8bEsWVH1AXshcPPBWiV0eqoVIZUteHSCvX4znkm9e7LK5AxIskIuEy3wLVeoG2K2xr62PWPX1+kmXxK5ly7YanVrIavsJd/d+QhhOapBfAzgZF/CAcXfhawH3Xe4tBgLW7gJLiQZYVwMsRmMBewSuPADfri9FLpciFCmLb56kl13kn+rcCpHVeH/nUm7rQMVWUnS1UT1TPWg/svBeYpX7fE+H6veXLA+53GdRek3lPGYyeuy+x/DmbJ8DZcAy3/6j2hu0ufy3XXa9qW9eXW3bV195HkHneV4Lz6Ey65w6wz1PubtSKTSOVIcxiiMvZ/VzhgpLHvPdgOjCEgRliwbDhCUyfRaWaD8smVgTluhYUYkejkrACzJ1GiUlF7lUCETAQG6Zx+OvoohkJFIo94SUIoEKsSq4ZP5DWGqknW3Lo3WPWRyFqq1UjnXJiqziSItoo/R4WT5y1khRI4HzpZSKYc1U7/GcZVks4J5GBh2er7MJ0C1ZGst8BSUsKOAMI4XsHIGK5g9RUWRpOMm24NspVE+yFXjcJIKGxSTLRZizJImgRszScAXPnPAN91eSTWiAFogxx3fchUcMZJmWS2zTpLaJbdul1DUda2IibG3UjwGPGcZ4qPnMeDRkgZ7Sdo6gZh8ro9lHhkd81uho/7Sw03Pi3of9G/dU93j+GEH/hsxGtuXYrqPLRsS0CLX6o5JdZB0hS9
maQKPNUvZYkWb62UzePdLk7XNavNtDfZ6XeTUY1NhNj5kc64wdIftmNj+tsTvHUrLRjN00P5u1N8TloLm75zT35i1buP8Nwb2HfcOHV0k886WyycOUWM93W2iLlYyjFIy/mSZEw5g3ZIuOebsa8yYnte7+6O5FhN/xiKMaVhjr6AHG6kHEDJGHaugB15m6hnMYVCQwmoAzuVwlQBXmRH19+JnHrHi4j8EjjeJRtYMIg7LNpCsfcOiAj8joSGMGZGpgMpYl9PNLzxKUPrMXAagn1ZnXVEevBYaYxCBWB5vdfGsLGzyFapCOmsPsI+WO5jA9lMqVDBBdX77oOf4WoknAc3I4G3tVorj1doKdH/1ZBaVaXlSZwqSaSV3kc30KpzY1HX6sFe+xkxdVaGr0pZsyH01B1s8X0Lj6QmKZ/8GF9X2iAta9r97xNSHrLcqGXGWg9mF3fNfUmILy3L41YGI4dCyP7VvEPAJTwHYslX8BpbBDdXYH7EHp4p148cL1ua9dmvFcatGjGclbFKuhJSd1YvNzTIS+iZaUX0VLZgn7IdLJHZnc9qhJv2w0X++s0RnTvhVY2HDskQwB90cBLxpCEaVhzGdqGf+wh5ar/c0afbUuVIrqARgyTKLxSQ9xwm3t4Ji7yHUH4odWlx9aSMOBdPwQj+WPuE97fmeLB/ZeYmVAuRtYOr242CPlvoEB9LKbjNjDTU8aJhtnaKmlpAClTnLOkqKnIOi97OLfhSwVCu8OvrWI1THRB7R4rgmWSRQE5WSHTuHdCZABlGE/d5Imzx9QBhlNGT8h82xyVrmSpxjoDLS+5PcPKjjsMpZtqoz1rGS44bSD3a5batiLbsqQYGMszon7/EVNXhV90vl+Ro0LFjhMzzctmyJrGGU932Nla7YCnTSGOv0Y2tPIGfeoPO1KOc8eFVITvsPLgWfdWNS8ZkuLN4/V/pQvLIU41584fv/7U5xn84TY6fuKfn/KWHHNIh/JNwZ0g4ZsHHSDyhDPtirufiT9jB276LFbGc4aupwP5VPvR2dn3RTpHLHtZ+zVEvpsV7Vu6l3LpUbbrW71Z2tmIfQGeC+6ileF/L9pWD/WOfGfCPYYxN7ZsxPros9rS12AyBxWCbtx/0j/LXgV3s1t6NSYTimhjmlhlzrI6biKZtAxPeVcAe2Hj4+zqfpN+kDvC/8Ps1l2b/Y5TFvxkdmU7lff68cTF2p3WvNftbKs9Y8/cvMf \ No newline at end of file diff --git a/packages.txt b/packages.txt index f1b68b1..ae3df4d 100644 --- a/packages.txt +++ b/packages.txt @@ -1,3 +1,4 @@ +antlr4-python3-runtime Jinja2 neo4j termcolor diff --git a/web/backend/.gitignore b/web/backend/.gitignore deleted file mode 100644 index 3507a4c..0000000 --- a/web/backend/.gitignore +++ /dev/null @@ -1 +0,0 @@ -config.ini \ No newline at end of file