Replaced ANTLR parser with custom class

This commit is contained in:
mdnapo 2024-06-01 10:54:44 +02:00
parent 78f3345419
commit c85da84272
28 changed files with 366 additions and 3298 deletions

View File

@ -1,4 +1,5 @@
using Humanizer; using Humanizer;
using MycroForge.CLI.Extensions;
namespace MycroForge.CLI.CodeGen; namespace MycroForge.CLI.CodeGen;
@ -8,7 +9,7 @@ public class CrudRouterGenerator
[ [
"from typing import Annotated", "from typing import Annotated",
"from fastapi import APIRouter, Depends", "from fastapi import APIRouter, Depends",
"from fastapi.responses import JSONResponse", "from fastapi.responses import JSONResponse, Response",
"from fastapi.encoders import jsonable_encoder", "from fastapi.encoders import jsonable_encoder",
"from %service_import_path% import %service_class_name%", "from %service_import_path% import %service_class_name%",
"from %create_entity_request_import_path% import %create_entity_request_class_name%", "from %create_entity_request_import_path% import %create_entity_request_class_name%",
@ -53,12 +54,13 @@ public class CrudRouterGenerator
"", "",
"@router.patch(\"/{id}\")", "@router.patch(\"/{id}\")",
"async def update(", "async def update(",
"\tid: int,",
"\trequest: Update%entity_class_name%Request,", "\trequest: Update%entity_class_name%Request,",
"\tservice: Annotated[%service_class_name%, Depends(%service_class_name%)]", "\tservice: Annotated[%service_class_name%, Depends(%service_class_name%)]",
"):", "):",
"\ttry:", "\ttry:",
"\t\tupdated = await service.update(request.model_dump(exclude_unset=True))", "\t\tupdated = await service.update(id, request.model_dump(exclude_unset=True))",
"\t\treturn JSONResponse(status_code=204 if updated else 404, content=None)", "\t\treturn Response(status_code=204 if updated else 404)",
"\texcept Exception as ex:", "\texcept Exception as ex:",
"\t\tprint(str(ex))", "\t\tprint(str(ex))",
"\t\treturn JSONResponse(status_code=500, content=str(ex))", "\t\treturn JSONResponse(status_code=500, content=str(ex))",
@ -93,23 +95,27 @@ public class CrudRouterGenerator
var servicesFolderPath = $"{Features.Api.FeatureName}/services/{path}"; var servicesFolderPath = $"{Features.Api.FeatureName}/services/{path}";
var serviceFilePath = $"{servicesFolderPath}/{entitySnakeCaseName}_service.py"; var serviceFilePath = $"{servicesFolderPath}/{entitySnakeCaseName}_service.py";
var serviceImportPath = serviceFilePath var serviceImportPath = serviceFilePath
.Replace('/', '.') .Replace('/', '.')
.Replace('\\', '.') .Replace('\\', '.')
.Replace(".py", string.Empty) .Replace(".py", string.Empty)
.Trim(); .DeduplicateDots()
.Trim();
var routersFolderPath = $"{Features.Api.FeatureName}/routers/{path}"; var routersFolderPath = $"{Features.Api.FeatureName}/routers/{path}";
var routerFilePath = $"{routersFolderPath}/{entitySnakeCaseName}.py"; var routerFilePath = $"{routersFolderPath}/{entitySnakeCaseName}.py";
var routerImportPath = routersFolderPath var routerImportPath = routersFolderPath
.Replace('/', '.') .Replace('/', '.')
.Replace('\\', '.') .Replace('\\', '.')
.Trim(); .Replace(".py", "")
.DeduplicateDots()
.Trim();
var requestsFolderPath = $"{Features.Api.FeatureName}/requests/{path}"; var requestsFolderPath = $"{Features.Api.FeatureName}/requests/{path}";
var createRequestImportPath = $"{requestsFolderPath}/Create{entityClassName}Request" var createRequestImportPath = $"{requestsFolderPath}/Create{entityClassName}Request"
.Replace('/', '.') .Replace('/', '.')
.Replace('\\', '.') .Replace('\\', '.')
.DeduplicateDots()
.Underscore() .Underscore()
.ToLower(); .ToLower();
var createRequestClassName = $"Create{entityClassName}Request"; var createRequestClassName = $"Create{entityClassName}Request";
@ -117,28 +123,28 @@ public class CrudRouterGenerator
var updateRequestImportPath = $"{requestsFolderPath}/Update{entityClassName}Request" var updateRequestImportPath = $"{requestsFolderPath}/Update{entityClassName}Request"
.Replace('/', '.') .Replace('/', '.')
.Replace('\\', '.') .Replace('\\', '.')
.DeduplicateDots()
.Underscore() .Underscore()
.ToLower(); .ToLower();
var updateRequestClassName = $"Update{entityClassName}Request"; var updateRequestClassName = $"Update{entityClassName}Request";
var router = string.Join("\n", Template) var router = string.Join("\n", Template)
.Replace("%service_import_path%", serviceImportPath) .Replace("%service_import_path%", serviceImportPath)
.Replace("%entity_class_name%", entityClassName) .Replace("%entity_class_name%", entityClassName)
.Replace("%service_class_name%", serviceClassName) .Replace("%service_class_name%", serviceClassName)
.Replace("%create_entity_request_import_path%", createRequestImportPath) .Replace("%create_entity_request_import_path%", createRequestImportPath)
.Replace("%create_entity_request_class_name%", createRequestClassName) .Replace("%create_entity_request_class_name%", createRequestClassName)
.Replace("%update_entity_request_import_path%", updateRequestImportPath) .Replace("%update_entity_request_import_path%", updateRequestImportPath)
.Replace("%update_entity_request_class_name%", updateRequestClassName); .Replace("%update_entity_request_class_name%", updateRequestClassName);
await _context.CreateFile(routerFilePath, router); await _context.CreateFile(routerFilePath, router);
var main = await _context.ReadFile("main.py"); var main = await _context.ReadFile("main.py");
main += string.Join('\n', [ main = new MainModifier(main).Initialize()
"", .Import(from: routerImportPath, import: entitySnakeCaseName)
$"from {routerImportPath} import {entitySnakeCaseName}", .IncludeRouter(prefix: entityRoutePrefix, router: entitySnakeCaseName)
$"app.include_router(prefix=\"/{entityRoutePrefix}\", router={entitySnakeCaseName}.router)" .Rewrite();
]);
await _context.WriteFile("main.py", main); await _context.WriteFile("main.py", main);
} }

View File

@ -1,4 +1,5 @@
using Humanizer; using Humanizer;
using MycroForge.CLI.Extensions;
namespace MycroForge.CLI.CodeGen; namespace MycroForge.CLI.CodeGen;
@ -74,6 +75,7 @@ public class CrudServiceGenerator
.Replace('/', '.') .Replace('/', '.')
.Replace('\\', '.') .Replace('\\', '.')
.Replace(".py", string.Empty) .Replace(".py", string.Empty)
.DeduplicateDots()
.Trim(); .Trim();
var servicesFolderPath = $"{Features.Api.FeatureName}/services/{path}"; var servicesFolderPath = $"{Features.Api.FeatureName}/services/{path}";

View File

@ -1,78 +1,75 @@
using MycroForge.Parsing; namespace MycroForge.CLI.CodeGen;
namespace MycroForge.CLI.CodeGen; public class DbEnvInitializer
public class DbEnvInitializer : PythonSourceModifier
{ {
private PythonParser.Import_fromContext? _alembicImport; private readonly Source _source;
private PythonParser.AssignmentContext? _targetMetaDataAssignment; private SourceMatch? _alembicImport;
private PythonParser.AssignmentContext? _urlAssignmentContext; private SourceMatch? _targetMetaDataAssignment;
private PythonParser.AssignmentContext? _connectableAssignmentContext; private SourceMatch? _urlAssignmentContext;
private SourceMatch? _connectableAssignmentContext;
public DbEnvInitializer(string source) : base(source) public DbEnvInitializer(string source)
{ {
_source = new Source(source);
} }
public override string Rewrite() public string Rewrite()
{ {
var tree = Parser.file_input(); InsertDefaultDbImports();
Visit(tree);
if (_alembicImport is null)
throw new Exception("Could not find import insertion point.");
if (_targetMetaDataAssignment is null) InsertTargetMetadata();
throw new Exception("Could not find metadata insertion point.");
if (_urlAssignmentContext is null) InsertConnectionstringFromDbSettings();
throw new Exception("Could not find url insertion point.");
InsertSqlAlchemyConfigUpdate();
return _source.Text;
}
private void InsertSqlAlchemyConfigUpdate()
{
_connectableAssignmentContext = _source.Find(@"connectable\s*=\s*.+\(\s*");
if (_connectableAssignmentContext is null) if (_connectableAssignmentContext is null)
throw new Exception("Could not find connectable insertion point."); throw new Exception("Could not find connectable insertion point.");
Rewrite(_alembicImport, [
GetOriginalText(_alembicImport),
$"from {Features.Db.FeatureName}.settings import DbSettings",
$"from {Features.Db.FeatureName}.entities.entity_base import EntityBase"
]);
Rewrite(_targetMetaDataAssignment, "target_metadata = EntityBase.metadata");
Rewrite(_urlAssignmentContext, "url = DbSettings.get_connectionstring()");
const string indent = " "; const string indent = " ";
Rewrite(_connectableAssignmentContext, [ _source.InsertMultiLine(_connectableAssignmentContext.StartIndex, [
"url = DbSettings.get_connectionstring()", $"url = DbSettings.get_connectionstring()",
$"{indent}context.config.set_main_option('sqlalchemy.url', url)", $"{indent}context.config.set_main_option('sqlalchemy.url', url)\n{indent}",
$"{indent}{GetOriginalText(_connectableAssignmentContext)}"
]); ]);
return Rewriter.GetText();
} }
public override object? VisitImport_from(PythonParser.Import_fromContext context) private void InsertConnectionstringFromDbSettings()
{ {
var text = GetOriginalText(context); _urlAssignmentContext = _source.Find("""url\s*=\s*config\s*.+\s*get_main_option\(\s*"sqlalchemy.url"\s*\)""");
if (text == "from alembic import context") if (_urlAssignmentContext is null)
_alembicImport = context; throw new Exception("Could not find url insertion point.");
return base.VisitImport_from(context); _source.Replace(_urlAssignmentContext, "url = DbSettings.get_connectionstring()");
} }
public override object? VisitAssignment(PythonParser.AssignmentContext context) private void InsertTargetMetadata()
{ {
var text = GetOriginalText(context); _targetMetaDataAssignment = _source.Find(@"target_metadata\s*=\s*None");
if (text == "target_metadata = None") if (_targetMetaDataAssignment is null)
_targetMetaDataAssignment = context; throw new Exception("Could not find metadata insertion point.");
else if (text == "url = config.get_main_option(\"sqlalchemy.url\")")
_urlAssignmentContext = context;
else if (text.StartsWith("connectable =")) _source.Replace(_targetMetaDataAssignment, "target_metadata = EntityBase.metadata");
_connectableAssignmentContext = context; }
return base.VisitAssignment(context); private void InsertDefaultDbImports()
{
_alembicImport = _source.Find(@"from\s+alembic\s+import\s+context");
if (_alembicImport is null)
throw new Exception("Could not find import insertion point.");
_source.InsertMultiLine(_alembicImport.EndIndex, [
$"\nfrom {Features.Db.FeatureName}.settings import DbSettings",
$"from {Features.Db.FeatureName}.entities.entity_base import EntityBase\n"
]);
} }
} }

View File

@ -1,44 +1,38 @@
using MycroForge.Parsing; namespace MycroForge.CLI.CodeGen;
namespace MycroForge.CLI.CodeGen; public class DbEnvModifier
public class DbEnvModifier : PythonSourceModifier
{ {
private readonly Source _source;
private readonly string _importPath; private readonly string _importPath;
private readonly string _className; private readonly string _className;
private PythonParser.Import_fromContext? _lastImport; private SourceMatch? _lastImport;
public DbEnvModifier(string source, string importPath, string className) : base(source) public DbEnvModifier(string source, string importPath, string className)
{ {
_source = new Source(source);
_importPath = importPath; _importPath = importPath;
_className = className; _className = className;
} }
public override string Rewrite() public string Rewrite()
{ {
var tree = Parser.file_input(); InsertEntityImport();
Visit(tree);
return _source.Text;
}
private void InsertEntityImport()
{
_lastImport = _source
.FindAll($@"from\s+{Features.Db.FeatureName}\.entities(?:\..+)+\s*import\s+.+\s")
.LastOrDefault();
if (_lastImport is null) if (_lastImport is null)
throw new Exception("Could not find import insertion point."); throw new Exception("Could not find import insertion point.");
var lastImportText = GetOriginalText(_lastImport);
Rewrite(_lastImport, [ _source.InsertSingleLine(
lastImportText, _lastImport.EndIndex,
$"from {Features.Db.FeatureName}.entities.{_importPath} import {_className}" $"from {_importPath} import {_className}\n"
]); );
return Rewriter.GetText();
}
public override object? VisitImport_from(PythonParser.Import_fromContext context)
{
var text = GetOriginalText(context);
if (text.StartsWith($"from {Features.Db.FeatureName}.entities"))
_lastImport = context;
return base.VisitImport_from(context);
} }
} }

View File

@ -1,38 +1,38 @@
using Humanizer; using Humanizer;
using MycroForge.Parsing;
namespace MycroForge.CLI.CodeGen; namespace MycroForge.CLI.CodeGen;
public partial class EntityLinker public partial class EntityLinker
{ {
public class EntityModel : PythonSourceModifier public class EntityModel
{ {
private readonly Source _source;
private readonly string _className; private readonly string _className;
private readonly string _path; private readonly string _path;
private readonly List<PythonParser.Import_fromContext> _importContexts; private readonly List<SourceMatch> _importContexts;
private readonly List<string> _importsBuffer; private readonly List<string> _importsBuffer;
private PythonParser.Import_fromContext LastImport => _importContexts.Last(); private readonly List<SourceMatch> _classContexts;
private SourceMatch _tableContext;
private readonly List<PythonParser.Class_defContext> _classContexts; private readonly List<SourceMatch> _columnContexts;
private PythonParser.AssignmentContext _tableContext;
private readonly List<PythonParser.AssignmentContext> _columnContexts;
private readonly List<string> _columnsBuffer; private readonly List<string> _columnsBuffer;
private PythonParser.AssignmentContext LastColumn => _columnContexts.Last();
private SourceMatch LastColumn => _columnContexts.Last();
private SourceMatch LastImport => _importContexts.Last();
public string ClassName => _className; public string ClassName => _className;
public string Path => _path; public string Path => _path;
public string FieldName => _className.Underscore().ToLower(); public string FieldName => _className.Underscore().ToLower();
public string TableName => GetOriginalText(_tableContext) public string TableName => _tableContext.Text
.Replace("__tablename__", string.Empty) .Replace("__tablename__", string.Empty)
.Replace("=", string.Empty) .Replace("=", string.Empty)
.Replace("\"", string.Empty) .Replace("\"", string.Empty)
.Trim(); .Trim();
public EntityModel(string className, string path, string source) : base(source) public EntityModel(string className, string path, string source)
{ {
_source = new Source(source);
_className = className; _className = className;
_path = path; _path = path;
_importContexts = new(); _importContexts = new();
@ -41,34 +41,61 @@ public partial class EntityLinker
_tableContext = default!; _tableContext = default!;
_columnContexts = new(); _columnContexts = new();
_columnsBuffer = new(); _columnsBuffer = new();
var classContexts = _source.FindAll(@"class\s+.*\s*\(EntityBase\)\s*:\s");
if (!classContexts.Any(c => c.Text.Contains(_className)))
throw new Exception($"Entity {_className} was not found in {_path}.");
} }
public void Initialize() public void Initialize()
{ {
var tree = Parser.file_input(); ReadImports();
Visit(tree); AssertClassExists();
if (!_classContexts.Any(c => GetOriginalText(c).Contains(_className))) AssertTableNameExists();
ReadColumns();
InsertRelationshipImport();
InsertForeignKeyImport();
}
private void ReadImports()
{
_importContexts.AddRange(_source.FindAll(@"from\s+.+\s+import\s+.*\s"));
}
private void AssertClassExists()
{
_classContexts.AddRange(_source.FindAll(@"class\s+.+\s*\(EntityBase\)\s*:\s"));
if (!_classContexts.Any(c => c.Text.Contains(_className)))
throw new Exception($"Entity {_className} was not found in {_path}."); throw new Exception($"Entity {_className} was not found in {_path}.");
}
private void AssertTableNameExists()
{
_tableContext = _source.Find(@"__tablename__\s*=\s*.+\s");
if (string.IsNullOrEmpty(_tableContext.Text))
throw new Exception($"__tablename__ definition for Entity {_className} was not found in {_path}.");
}
private void ReadColumns()
{
_columnContexts.AddRange(
_source.FindAll(@".+\s*:\s*Mapped\[.+\]\s*=\s*(relationship|mapped_column)\(.+\)\s")
);
if (_columnContexts.Count == 0) if (_columnContexts.Count == 0)
throw new Exception($"Entity {_className} has no columns."); throw new Exception($"Entity {_className} has no columns.");
_importsBuffer.Add(GetOriginalText(LastImport));
_columnsBuffer.Add(GetOriginalText(LastColumn));
InsertRelationshipImport();
InsertForeignKeyImport();
} }
private void InsertRelationshipImport() private void InsertRelationshipImport()
{ {
var relationship = _importContexts.FirstOrDefault(import => var relationship = _importContexts.FirstOrDefault(
{ import => import.Text.Contains("sqlalchemy.orm") && import.Text.Contains("relationship")
var text = GetOriginalText(import); );
return text.Contains("sqlalchemy.orm") && text.Contains("relationship");
});
if (relationship is null) if (relationship is null)
_importsBuffer.Add("from sqlalchemy.orm import relationship"); _importsBuffer.Add("from sqlalchemy.orm import relationship");
@ -76,58 +103,40 @@ public partial class EntityLinker
private void InsertForeignKeyImport() private void InsertForeignKeyImport()
{ {
var foreignKey = _importContexts.FirstOrDefault(import => var foreignKey = _importContexts.FirstOrDefault(
{ import => import.Text.Contains("sqlalchemy") && import.Text.Contains("ForeignKey")
var text = GetOriginalText(import); );
return text.Contains("sqlalchemy") && text.Contains("ForeignKey");
});
if (foreignKey is null) if (foreignKey is null)
_importsBuffer.Add("from sqlalchemy import ForeignKey"); _importsBuffer.Add("from sqlalchemy import ForeignKey");
} }
public override object? VisitImport_from(PythonParser.Import_fromContext context) public void AppendColumn(string text)
{ {
_importContexts.Add(context); _columnsBuffer.Add(text);
return base.VisitImport_from(context);
} }
public override object? VisitClass_def(PythonParser.Class_defContext context) public void AppendColumns(params string[] text)
{ {
_classContexts.Add(context); _columnsBuffer.Add(string.Join('\n', text));
return base.VisitClass_def(context);
} }
public override object? VisitAssignment(PythonParser.AssignmentContext context)
{
var text = GetOriginalText(context);
if (text.StartsWith("__tablename__"))
_tableContext = context;
if (text.Contains("Mapped["))
_columnContexts.Add(context);
return base.VisitAssignment(context);
}
public void AppendColumn(string text) => _columnsBuffer.Add(text);
public void AppendColumns(params string[] text) => _columnsBuffer.Add(string.Join('\n', text));
public void Import(string from, string import) public void Import(string from, string import)
{ {
var exists = _importContexts.Select(GetOriginalText).Any(ctx => ctx.Contains(from) && ctx.Contains(import)); var exists = _importContexts.Any(ctx => ctx.Text.Contains(from) && ctx.Text.Contains(import));
var buffered = _importsBuffer.Any(txt => txt.Contains(from) && txt.Contains(import)); var buffered = _importsBuffer.Any(txt => txt.Contains(from) && txt.Contains(import));
if (!exists && !buffered) if (!exists && !buffered)
_importsBuffer.Add($"from {from} import {import}"); _importsBuffer.Add($"from {from} import {import}");
} }
public override string Rewrite() public string Rewrite()
{ {
Rewrite(LastImport, _importsBuffer.ToArray()); // The order matters! We must first rewrite the columns first,
Rewrite(LastColumn, _columnsBuffer.ToArray()); // so that their indexes are not modified when inserting imports.
return Rewriter.GetText(); _source.InsertMultiLine(LastColumn.EndIndex, _columnsBuffer.Append("\n").ToArray());
_source.InsertMultiLine(LastImport.EndIndex, _importsBuffer.Append("\n").ToArray());
return _source.Text;
} }
} }
} }

View File

@ -112,7 +112,7 @@ public partial class EntityLinker
.Split('.') .Split('.')
; ;
var associationTableImportPath = string.Join('.', associationTableImport[..^1]); var associationTableImportPath = string.Join('.', associationTableImport);
var associationTableImportName = associationTableImport[^1]; var associationTableImportName = associationTableImport[^1];
left.Import(from: string.Join('.', associationTableImportPath), import: associationTableImportName); left.Import(from: string.Join('.', associationTableImportPath), import: associationTableImportName);
@ -130,6 +130,14 @@ public partial class EntityLinker
await _context.WriteFile(left.Path, left.Rewrite()); await _context.WriteFile(left.Path, left.Rewrite());
await _context.WriteFile(right.Path, right.Rewrite()); await _context.WriteFile(right.Path, right.Rewrite());
var env = await _context.ReadFile($"{Features.Db.FeatureName}/env.py");
env = new DbEnvModifier(env, associationTableImportPath, associationTableImportName).Rewrite();
await _context.WriteFile($"{Features.Db.FeatureName}/env.py", env);
var main = await _context.ReadFile("main.py");
main = new MainModifier(main).Initialize().Import(associationTableImportPath, associationTableImportName).Rewrite();
await _context.WriteFile("main.py", main);
} }
private async Task<EntityModel> LoadEntity(string name) private async Task<EntityModel> LoadEntity(string name)
@ -138,11 +146,11 @@ public partial class EntityLinker
if (name.Split(':').Select(s => s.Trim()).ToArray() is { Length: 2 } fullName) if (name.Split(':').Select(s => s.Trim()).ToArray() is { Length: 2 } fullName)
{ {
path = Path.Join(path, fullName[0]); path = Path.Combine(path, fullName[0]);
name = fullName[1]; name = fullName[1];
} }
path = Path.Join(path, $"{name.Underscore().ToLower()}.py"); path = Path.Combine(path, $"{name.Underscore().ToLower()}.py");
var entity = new EntityModel(name, path, await _context.ReadFile(path)); var entity = new EntityModel(name, path, await _context.ReadFile(path));
entity.Initialize(); entity.Initialize();
return entity; return entity;

View File

@ -1,221 +1,81 @@
using Antlr4.Runtime.Tree; namespace MycroForge.CLI.CodeGen;
using MycroForge.Parsing;
namespace MycroForge.CLI.CodeGen; public class MainModifier
public class MainModifier : PythonSourceModifier
{ {
private PythonParser.Import_fromContext? _lastEntityImport; private readonly Source _source;
private PythonParser.Import_fromContext? _lastAssociationImport; private readonly List<string> _importsBuffer;
private PythonParser.Import_fromContext? _lastRouterImport; private readonly List<string> _routerIncludeBuffer;
private PythonParser.Import_fromContext? _lastRouterInclude; private SourceMatch? _lastImport;
private SourceMatch? _lastRouterInclude;
private readonly List<string> _lastEntityImportBuffer; public MainModifier(string source)
private readonly List<string> _lastAssociationImportBuffer;
private readonly List<string> _lastRouterImportBuffer;
private readonly List<string> _lastRouterIncludeBuffer;
public MainModifier(string source) : base(source)
{ {
_lastEntityImportBuffer = new(); _source = new Source(source);
_lastAssociationImportBuffer = new(); _importsBuffer = new();
_lastRouterImportBuffer = new(); _routerIncludeBuffer = new();
_lastRouterIncludeBuffer = new();
} }
public void Initialize() public MainModifier Initialize()
{ {
var tree = Parser.file_input(); _lastImport = _source
.FindAll($@"from\s+.+\s+import\s+.+\s")
.LastOrDefault();
Visit(tree); _lastRouterInclude = _source.Find(@"app\s*\.\s*include_router\((.|\s)+\)\s?");
if (_lastEntityImport is not null) return this;
_lastEntityImportBuffer.Add(GetOriginalText(_lastEntityImport)); }
if (_lastAssociationImport is not null) private string ToImportString(string from, string import) => $"from {from} import {import}\n";
_lastAssociationImportBuffer.Add(GetOriginalText(_lastAssociationImport));
if (_lastRouterImport is not null) public MainModifier Import(string from, string import)
_lastRouterImportBuffer.Add(GetOriginalText(_lastRouterImport)); {
_importsBuffer.Add(ToImportString(from, import));
return this;
}
public MainModifier IncludeRouter(string prefix, string router)
{
_routerIncludeBuffer.Add($"\napp.include_router(prefix=\"/{prefix}\", router={router}.router)\n");
return this;
}
public string Rewrite()
{
InsertIncludes();
InsertImports();
return _source.Text;
}
private void InsertImports()
{
if (_importsBuffer.Count == 0) return;
if (_lastImport is not null)
{
_source.InsertMultiLine(_lastImport.EndIndex, _importsBuffer.ToArray());
}
else
{
_source.InsertMultiLineAtStart(_importsBuffer.ToArray());
}
}
private void InsertIncludes()
{
if (_routerIncludeBuffer.Count == 0) return;
if (_lastRouterInclude is not null) if (_lastRouterInclude is not null)
_lastRouterIncludeBuffer.Add(GetOriginalText(_lastRouterInclude)); {
} _source.InsertMultiLine(
_lastRouterInclude.EndIndex, _routerIncludeBuffer.ToArray()
private string ToImportString(string from, string import) => $"from {from} import {import}"; );
}
public void ImportEntity(string from, string import) else
{ {
_lastEntityImportBuffer.Add(ToImportString(from, import)); _source.InsertMultiLineAtEnd(_routerIncludeBuffer.ToArray());
} }
public void ImportAssociation(string from, string import)
{
_lastAssociationImportBuffer.Add(ToImportString(from, import));
}
public void ImportRouter(string from, string import)
{
_lastRouterImportBuffer.Add(ToImportString(from, import));
}
public void IncludeRouter(string prefix, string router)
{
_lastRouterImportBuffer.Add($"app.include_router(prefix=\"/{prefix}\", router={router}.router)");
// _lastRouterImportBuffer.Add($"app.include_router(prefix=\"/{prefix}\", router={router}.router)");
}
public override string Rewrite()
{
if (_lastEntityImport is not null)
Rewrite(_lastEntityImport, _lastEntityImportBuffer.ToArray());
if (_lastAssociationImport is not null)
Rewrite(_lastAssociationImport, _lastAssociationImportBuffer.ToArray());
if (_lastRouterImport is not null)
Rewrite(_lastRouterImport, _lastRouterImportBuffer.ToArray());
return Rewriter.GetText();
}
// public override object? VisitPrimary(PythonParser.PrimaryContext context)
// {
// // Console.WriteLine(GetOriginalText(context));
// return base.VisitPrimary(context);
// }
//
// public override object? VisitName_or_attr(PythonParser.Name_or_attrContext context)
// {
// Console.WriteLine(GetOriginalText(context));
// return base.VisitName_or_attr(context);
// }
//
// public override object? VisitStatement(PythonParser.StatementContext context)
// {
// Console.WriteLine(GetOriginalText(context));
// return base.VisitStatement(context);
// }
// public override object? VisitDotted_name(PythonParser.Dotted_nameContext context)
// {
// Console.WriteLine(GetOriginalText(context));
// return base.VisitDotted_name(context);
// }
//
// public override object? VisitDotted_name(PythonParser.Dotted_nameContext context)
// {
// Console.WriteLine(GetOriginalText(context));
//
// return base.VisitDotted_name(context);
// }
//
// public override object? VisitDotted_as_names(PythonParser.Dotted_as_namesContext context)
// {
// Console.WriteLine(GetOriginalText(context));
//
// return base.VisitDotted_as_names(context);
// }
//
public override object? VisitErrorNode(IErrorNode node)
{
Console.WriteLine(node.GetText());
return base.VisitErrorNode(node);
}
// public override object? VisitValue_pattern(PythonParser.Value_patternContext context)
// {
// Console.WriteLine(GetOriginalText(context));
// return base.VisitValue_pattern(context);
// }
//
// public override object? VisitStar_atom(PythonParser.Star_atomContext context)
// {
// Console.WriteLine(GetOriginalText(context));
// return base.VisitStar_atom(context);
// }
//
// public override object? VisitExpression(PythonParser.ExpressionContext context)
// {
// Console.WriteLine(GetOriginalText(context));
// return base.VisitExpression(context);
// }
//
// public override object? VisitT_primary(PythonParser.T_primaryContext context)
// {
// Console.WriteLine(GetOriginalText(context));
// return base.VisitT_primary(context);
// }
// public override object? VisitAttr(PythonParser.AttrContext context)
// {
// Console.WriteLine(GetOriginalText(context));
// return base.VisitAttr(context);
// }
// public override object? VisitT_primary(PythonParser.T_primaryContext context)
// {
// Console.WriteLine(GetOriginalText(context));
// return base.VisitT_primary(context);
// }
// public override object? VisitAwait_primary(PythonParser.Await_primaryContext context)
// {
// Console.WriteLine(GetOriginalText(context));
// return base.VisitAwait_primary(context);
// }
// public override object? VisitTarget_with_star_atom(PythonParser.Target_with_star_atomContext context)
// {
// Console.WriteLine(GetOriginalText(context));
// return base.VisitTarget_with_star_atom(context);
// }
// public override object? VisitSingle_subscript_attribute_target(
// PythonParser.Single_subscript_attribute_targetContext context)
// {
// Console.WriteLine(GetOriginalText(context));
// return base.VisitSingle_subscript_attribute_target(context);
// }
//
// public override object? VisitSingle_target(PythonParser.Single_targetContext context)
// {
// Console.WriteLine(GetOriginalText(context));
// return base.VisitSingle_target(context);
// }
// public override object? VisitName_or_attr(PythonParser.Name_or_attrContext context)
// {
// Console.WriteLine(GetOriginalText(context));
// return base.VisitName_or_attr(context);
// }
// public override object? VisitNamed_expression(PythonParser.Named_expressionContext context)
// {
// Console.WriteLine(GetOriginalText(context));
//
// return base.VisitNamed_expression(context);
// }
// public override object? VisitPrimary(PythonParser.PrimaryContext context)
// {
// Console.WriteLine(GetOriginalText(context));
// return base.VisitPrimary(context);
// }
public override object? VisitImport_from(PythonParser.Import_fromContext context)
{
var text = GetOriginalText(context);
if (text.StartsWith($"from {Features.Db.FeatureName}.entities.associations"))
_lastAssociationImport = context;
if (text.StartsWith($"from {Features.Db.FeatureName}.entities"))
_lastEntityImport = context;
if (text.StartsWith($"from {Features.Api.FeatureName}.routers"))
_lastRouterImport = context;
return base.VisitImport_from(context);
} }
} }

View File

@ -1,17 +0,0 @@
using Antlr4.Runtime;
namespace MycroForge.CLI.CodeGen;
public abstract class PythonSourceModifier : PythonSourceVisitor
{
protected PythonSourceModifier(string source) : base(source)
{
}
public abstract string Rewrite();
protected void Rewrite(ParserRuleContext context, params string[] text)
{
Rewriter.Replace(from: context.start, to: context.Stop, text: string.Join('\n', text));
}
}

View File

@ -1,27 +0,0 @@
using Antlr4.Runtime;
using MycroForge.Parsing;
namespace MycroForge.CLI.CodeGen;
public abstract class PythonSourceVisitor : PythonParserBaseVisitor<object?>
{
protected CommonTokenStream Stream { get; }
protected PythonParser Parser { get; }
protected TokenStreamRewriter Rewriter { get; }
protected PythonSourceVisitor(string source)
{
var input = new AntlrInputStream(source);
var lexer = new PythonLexer(input);
Stream = new CommonTokenStream(lexer);
Parser = new PythonParser(Stream);
Rewriter = new TokenStreamRewriter(Stream);
}
protected string GetOriginalText(ParserRuleContext context)
{
// The parser does not necessarily return the original source,
// so we return the text from Rewriter.TokenStream, since this is unmodified.
return Rewriter.TokenStream.GetText(context);
}
}

View File

@ -0,0 +1,61 @@
using System.Text.RegularExpressions;
namespace MycroForge.CLI.CodeGen;
public class Source
{
    private string _text;

    // The source text with all edits applied so far.
    public string Text => _text;

    public Source(string text)
    {
        _text = text;
    }

    // Returns the first non-empty match of the regex pattern.
    // NOTE: throws (rather than returning null) when nothing matches, so
    // callers' null checks on the returned SourceMatch are defensive only.
    public SourceMatch Find(string pattern)
    {
        var regex = new Regex(pattern);
        var match = regex.Match(_text);
        if (match.Value.Length == 0)
            throw new Exception($"No match found for pattern: {pattern}");
        return new SourceMatch(match.Index, match.Value);
    }

    // Returns every match of the regex pattern; empty list when none match.
    public List<SourceMatch> FindAll(string pattern)
    {
        var regex = new Regex(pattern);
        var matches = regex.Matches(_text);
        return matches.Select(m => new SourceMatch(m.Index, m.Value)).ToList();
    }

    // Inserts a single line of text at the given character index.
    // FIX: the previous implementation called string.Join('\n', text) with a
    // string argument; overload resolution picks Join<T>(char, IEnumerable<T>)
    // with T = char, which interleaved a newline between every character of
    // the inserted text ("abc" -> "a\nb\nc"). Insert the text verbatim.
    public Source InsertSingleLine(int index, string text)
    {
        _text = _text.Insert(index, text);
        return this;
    }

    // Inserts the given lines, joined with '\n', at the character index.
    public Source InsertMultiLine(int index, params string[] text)
    {
        _text = _text.Insert(index, string.Join('\n', text));
        return this;
    }

    // Prepends the given lines, joined with '\n', to the text.
    public Source InsertMultiLineAtStart(params string[] text)
    {
        _text = string.Join('\n', text) + _text;
        return this;
    }

    // Appends the given lines, joined with '\n', to the text.
    public Source InsertMultiLineAtEnd(params string[] text)
    {
        _text += string.Join('\n', text);
        return this;
    }

    // Replaces exactly the span covered by match with the replacement text.
    public Source Replace(SourceMatch match, string text)
    {
        _text = _text.Remove(match.StartIndex, match.Text.Length);
        _text = _text.Insert(match.StartIndex, text);
        return this;
    }
}

View File

@ -0,0 +1,14 @@
namespace MycroForge.CLI.CodeGen;
/// <summary>
/// Describes a match within a source text: its start offset and the matched text.
/// </summary>
public class SourceMatch
{
    // Offset of the match within the source text.
    public readonly int StartIndex;

    // The matched text itself.
    public readonly string Text;

    /// <summary>Exclusive end offset of the match (start + length).</summary>
    public int EndIndex => StartIndex + Text.Length;

    public SourceMatch(int startIndex, string text) =>
        (StartIndex, Text) = (startIndex, text);
}

View File

@ -47,14 +47,14 @@ public partial class MycroForge
if (name.FullyQualifiedName() is { Length: 2 } fullName) if (name.FullyQualifiedName() is { Length: 2 } fullName)
{ {
folderPath = Path.Join(folderPath, fullName[0]); folderPath = Path.Combine(folderPath, fullName[0]);
name = fullName[1]; name = fullName[1];
} }
var moduleImportPath = folderPath.Replace('\\', '.').Replace('/', '.'); var moduleImportPath = folderPath.Replace('\\', '.').Replace('/', '.');
var moduleName = name.Underscore().ToLower(); var moduleName = name.Underscore().ToLower();
var fileName = $"{moduleName}.py"; var fileName = $"{moduleName}.py";
var filePath = Path.Join(folderPath, fileName); var filePath = Path.Combine(folderPath, fileName);
await _context.CreateFile(filePath, Template); await _context.CreateFile(filePath, Template);

View File

@ -9,12 +9,12 @@ public partial class MycroForge
{ {
public partial class Generate : Command, ISubCommandOf<Api> public partial class Generate : Command, ISubCommandOf<Api>
{ {
public Generate(IEnumerable<ISubCommandOf<Generate>> subCommands) : public Generate(IEnumerable<ISubCommandOf<Generate>> commands) :
base("generate", "Generate an API item") base("generate", "Generate an API item")
{ {
AddAlias("g"); AddAlias("g");
foreach (var subCommandOf in subCommands.Cast<Command>()) foreach (var command in commands.Cast<Command>())
AddCommand(subCommandOf); AddCommand(command);
} }
} }
} }

View File

@ -74,7 +74,7 @@ public partial class MycroForge
if (name.FullyQualifiedName() is { Length: 2 } fullName) if (name.FullyQualifiedName() is { Length: 2 } fullName)
{ {
folderPath = Path.Join(folderPath, fullName[0]); folderPath = Path.Combine(folderPath, fullName[0]);
name = fullName[1]; name = fullName[1];
} }
@ -89,9 +89,8 @@ public partial class MycroForge
code = code.Replace("%table_name%", name.Underscore().ToLower().Pluralize()); code = code.Replace("%table_name%", name.Underscore().ToLower().Pluralize());
code = code.Replace("%column_definitions%", columnDefinitions); code = code.Replace("%column_definitions%", columnDefinitions);
// var folderPath = Path.Join(, path);
var fileName = $"{name.Underscore().ToLower()}.py"; var fileName = $"{name.Underscore().ToLower()}.py";
var filePath = Path.Join(folderPath, fileName); var filePath = Path.Combine(folderPath, fileName);
await _context.CreateFile(filePath, code); await _context.CreateFile(filePath, code);
var importPathParts = new[] { folderPath, fileName.Replace(".py", "") } var importPathParts = new[] { folderPath, fileName.Replace(".py", "") }
@ -106,6 +105,10 @@ public partial class MycroForge
var env = await _context.ReadFile($"{Features.Db.FeatureName}/env.py"); var env = await _context.ReadFile($"{Features.Db.FeatureName}/env.py");
env = new DbEnvModifier(env, importPath, className).Rewrite(); env = new DbEnvModifier(env, importPath, className).Rewrite();
await _context.WriteFile($"{Features.Db.FeatureName}/env.py", env); await _context.WriteFile($"{Features.Db.FeatureName}/env.py", env);
var main = await _context.ReadFile("main.py");
main = new MainModifier(main).Initialize().Import(importPath, className).Rewrite();
await _context.WriteFile("main.py", main);
} }
private List<ColumnDefinition> GetColumnDefinitions(string[] fields) private List<ColumnDefinition> GetColumnDefinitions(string[] fields)

View File

@ -60,11 +60,11 @@ public partial class MycroForge
if (name.FullyQualifiedName() is { Length: 2} fullName) if (name.FullyQualifiedName() is { Length: 2} fullName)
{ {
folderPath = Path.Join(folderPath, fullName[0]); folderPath = Path.Combine(folderPath, fullName[0]);
name = fullName[1]; name = fullName[1];
} }
var filePath = Path.Join(folderPath, $"{name.Underscore().ToLower()}.py"); var filePath = Path.Combine(folderPath, $"{name.Underscore().ToLower()}.py");
var className = Path.GetFileName(name).Pascalize(); var className = Path.GetFileName(name).Pascalize();
var code = string.Join('\n', withSession ? WithSessionTemplate : DefaultTemplate) var code = string.Join('\n', withSession ? WithSessionTemplate : DefaultTemplate)
.Replace("%class_name%", className); .Replace("%class_name%", className);

View File

@ -1,6 +1,5 @@
using System.CommandLine; using System.CommandLine;
using System.Diagnostics; using System.Diagnostics;
using Microsoft.Scripting.Utils;
using MycroForge.CLI.Commands.Interfaces; using MycroForge.CLI.Commands.Interfaces;
namespace MycroForge.CLI.Commands; namespace MycroForge.CLI.Commands;
@ -28,13 +27,13 @@ public partial class MycroForge
private static async Task<string> CreateFile(string? name = null, string fileExtension = "py") private static async Task<string> CreateFile(string? name = null, string fileExtension = "py")
{ {
var folder = Path.Join( var folder = Path.Combine(
Environment.GetFolderPath(Environment.SpecialFolder.UserProfile), ".m4g" Environment.GetFolderPath(Environment.SpecialFolder.UserProfile), ".m4g"
); );
Directory.CreateDirectory(folder); Directory.CreateDirectory(folder);
var filePath = Path.Join(folder, $"{name}.{fileExtension}"); var filePath = Path.Combine(folder, $"{name}.{fileExtension}");
if (File.Exists(filePath)) if (File.Exists(filePath))
throw new Exception($"File {filePath} already exists."); throw new Exception($"File {filePath} already exists.");

View File

@ -26,11 +26,11 @@ public partial class MycroForge
private static async Task EditFile(string name) private static async Task EditFile(string name)
{ {
var folder = Path.Join( var folder = Path.Combine(
Environment.GetFolderPath(Environment.SpecialFolder.UserProfile), ".m4g" Environment.GetFolderPath(Environment.SpecialFolder.UserProfile), ".m4g"
); );
var file = Path.Join(folder, $"{name}.py"); var file = Path.Combine(folder, $"{name}.py");
if (!File.Exists(file)) if (!File.Exists(file))
throw new Exception($"File {file} does not exists."); throw new Exception($"File {file} does not exists.");

View File

@ -16,7 +16,7 @@ public partial class MycroForge
private void Execute() private void Execute()
{ {
var folder = Path.Join( var folder = Path.Combine(
Environment.GetFolderPath(Environment.SpecialFolder.UserProfile), ".m4g" Environment.GetFolderPath(Environment.SpecialFolder.UserProfile), ".m4g"
); );

View File

@ -7,9 +7,10 @@ public partial class MycroForge : RootCommand
{ {
public override string Name => "m4g"; public override string Name => "m4g";
public MycroForge(IEnumerable<ISubCommandOf<MycroForge>> commands) : base("The MycroForge CLI tool.") public MycroForge(IEnumerable<ISubCommandOf<MycroForge>> commands) :
base("The MycroForge CLI tool.")
{ {
foreach (var subCommandOf in commands.Cast<Command>()) foreach (var command in commands.Cast<Command>())
AddCommand(subCommandOf); AddCommand(command);
} }
} }

View File

@ -1,18 +1,17 @@
using Humanizer; namespace MycroForge.CLI.Extensions;
namespace MycroForge.CLI.Extensions;
public static class StringExtensions public static class StringExtensions
{ {
public static string NormalizePath(this string name)
{
var directoryPath = Path.GetDirectoryName(name).Underscore();
var filePath = Path.Join(directoryPath, name.Underscore().ToLower());
return filePath;
}
public static string[] FullyQualifiedName(this string name) public static string[] FullyQualifiedName(this string name)
{ {
return name.Split(':').Select(s => s.Trim()).ToArray(); return name.Split(':').Select(s => s.Trim()).ToArray();
} }
public static string DeduplicateDots(this string path)
{
while (path.Contains(".."))
path = path.Replace("..", ".");
return path.Trim('.');
}
} }

View File

@ -11,11 +11,6 @@
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>
<ProjectReference Include="..\MycroForge.Parsing\MycroForge.Parsing.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Antlr4.Runtime" Version="4.6.6" />
<PackageReference Include="Humanizer" Version="2.14.1" /> <PackageReference Include="Humanizer" Version="2.14.1" />
<PackageReference Include="IronPython" Version="3.4.1" /> <PackageReference Include="IronPython" Version="3.4.1" />
<PackageReference Include="Microsoft.Extensions.Hosting" Version="8.0.0" /> <PackageReference Include="Microsoft.Extensions.Hosting" Version="8.0.0" />

View File

@ -2,64 +2,23 @@
using MycroForge.CLI.Extensions; using MycroForge.CLI.Extensions;
using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Hosting;
using MycroForge.CLI.CodeGen;
// using var host = Host using var host = Host
// .CreateDefaultBuilder() .CreateDefaultBuilder()
// .ConfigureServices((_, services) => .ConfigureServices((_, services) =>
// { {
// services services
// .AddServices() .AddServices()
// .AddCommands(); .AddCommands();
// }) })
// .Build(); .Build();
//
// try
// {
// await host.Services.GetRequiredService<MycroForge.CLI.Commands.MycroForge>()
// .InvokeAsync(args.Length == 0 ? ["--help"] : args);
// }
// catch(Exception e)
// {
// Console.WriteLine(e.Message);
// }
try
// var rewrite = new EntityFieldReader(string.Join("\n", [ {
// "from typing import Any, Dict", await host.Services.GetRequiredService<MycroForge.CLI.Commands.MycroForge>()
// "from sqlalchemy import JSON, DateTime, String, func", .InvokeAsync(args.Length == 0 ? ["--help"] : args);
// "from sqlalchemy.orm import Mapped, mapped_column", }
// "from sqlalchemy.dialects.mssql import TEXT", catch(Exception e)
// "from orm.entities.entity_base import EntityBase", {
// "class Product(EntityBase):", Console.WriteLine(e.Message);
// "\t__tablename__ = \"products\"", }
// "\tid: Mapped[int] = mapped_column(primary_key=True)",
// "\tmain_key: Mapped[str] = mapped_column(String(255), unique=True, nullable=False)",
// "\terp_key: Mapped[str] = mapped_column(String(255), unique=True, nullable=True)",
// "\tpim_key: Mapped[str] = mapped_column(String(255), unique=True, nullable=True)",
// "\twms_key: Mapped[str] = mapped_column(String(255), unique=True, nullable=True)",
// "\tshared_key: Mapped[str] = mapped_column(String(255), nullable=True)",
// "\taxis_1_code: Mapped[str] = mapped_column(String(255), nullable=True)",
// "\taxis_1_value: Mapped[str] = mapped_column(String(255), nullable=True)",
// "\taxis_2_code: Mapped[str] = mapped_column(String(255), nullable=True)",
// "\taxis_2_value: Mapped[str] = mapped_column(String(255), nullable=True)",
// "\taxis_3_code: Mapped[str] = mapped_column(String(255), nullable=True)",
// "\taxis_3_value: Mapped[str] = mapped_column(String(255), nullable=True)",
// "\tdata: Mapped[Dict[str, Any]] = mapped_column(JSON(), nullable=True)",
// "\tdata_string: Mapped[str] = mapped_column(TEXT(), nullable=True)",
// "\tcreated_at: Mapped[DateTime] = mapped_column(DateTime(timezone=True), default=func.now())",
// "\tupdated_at: Mapped[DateTime] = mapped_column(DateTime(timezone=True), default=func.now(), onupdate=func.now())",
// "def __repr__(self) -> str:",
// "\treturn f\"Product(id={self.id!r}, main_key={self.main_key!r}, shared_key={self.shared_key})\""
// ])).ReadFields();
//
// rewrite.ForEach(f =>
// {
// Console.WriteLine($"name={f.Name}, type={f.Type}");
// });
var main = new MainModifier(string.Join("\n", await File.ReadAllLinesAsync("scripts/user.py")));
main.Initialize();
Console.WriteLine(main.Rewrite());

View File

@ -1,26 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Antlr4" Version="4.6.6">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Antlr4.CodeGenerator" Version="4.6.6">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Antlr4.Runtime" Version="4.6.6" />
</ItemGroup>
<ItemGroup>
<Content Include="PythonLexer.g4" />
<Content Include="PythonParser.g4" />
</ItemGroup>
</Project>

File diff suppressed because it is too large Load Diff

View File

@ -1,493 +0,0 @@
/*
The MIT License (MIT)
Copyright (c) 2021 Robert Einhorn
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
* Project : Python Indent/Dedent handler for ANTLR4 grammars
*
* Developed by : Robert Einhorn
*/
using Antlr4.Runtime;
using System.Text.RegularExpressions;
namespace MycroForge.Parsing;
/// <summary>
/// Lexer base class that post-processes the raw token stream to emit the
/// INDENT/DEDENT tokens Python's grammar requires, normalize NEWLINE handling
/// (blank lines, comments, implicit line joining inside brackets), and manage
/// the f-string lexer modes. Tokens are buffered in a pending queue and
/// released one at a time from NextToken().
/// Indent/Dedent handler by Robert Einhorn (MIT licensed, see header).
/// </summary>
public abstract class PythonLexerBase : Lexer
{
    // A stack that keeps track of the indentation lengths
    private Stack<int> indentLengthStack;
    // A list where tokens are waiting to be loaded into the token stream
    private LinkedList<IToken> pendingTokens;
    // last pending token types
    private int previousPendingTokenType;
    private int lastPendingTokenTypeFromDefaultChannel;
    // The amount of opened parentheses, square brackets, or curly braces
    private int opened;
    // The amount of opened parentheses and square brackets in the current lexer mode
    private Stack<int> paren_or_bracket_openedStack;
    // Track which whitespace kinds have been used for indentation so a mix can be reported once.
    private bool wasSpaceIndentation;
    private bool wasTabIndentation;
    private bool wasIndentationMixedWithSpacesAndTabs;
    // Sentinel returned by GetIndentationLength() for inconsistent tab/space indentation.
    private const int INVALID_LENGTH = -1;
    private CommonToken curToken; // current (under processing) token
    private IToken ffgToken; // following (look ahead) token
    private const string ERR_TXT = " ERROR: ";
    protected PythonLexerBase(ICharStream input) : base(input)
    {
        this.Init();
    }
    // Resets all bookkeeping state; shared by the constructor and Reset().
    private void Init()
    {
        this.indentLengthStack = new Stack<int>();
        this.pendingTokens = new LinkedList<IToken>();
        this.previousPendingTokenType = 0;
        this.lastPendingTokenTypeFromDefaultChannel = 0;
        this.opened = 0;
        this.paren_or_bracket_openedStack = new Stack<int>();
        this.wasSpaceIndentation = false;
        this.wasTabIndentation = false;
        this.wasIndentationMixedWithSpacesAndTabs = false;
        this.curToken = null!;
        this.ffgToken = null!;
    }
    public override IToken NextToken() // reading the input stream until a return EOF
    {
        this.CheckNextToken();
        IToken firstPendingToken = this.pendingTokens.First.Value;
        this.pendingTokens.RemoveFirst();
        return firstPendingToken; // add the queued token to the token stream
    }
    // Processes the next raw token and queues the resulting token(s),
    // dispatching to the specialized handlers where bookkeeping is needed.
    private void CheckNextToken()
    {
        if (this.previousPendingTokenType != TokenConstants.Eof)
        {
            this.SetCurrentAndFollowingTokens();
            if (this.indentLengthStack.Count == 0) // We're at the first token
            {
                this.HandleStartOfInput();
            }
            switch (this.curToken.Type)
            {
                case PythonLexer.LPAR:
                case PythonLexer.LSQB:
                case PythonLexer.LBRACE:
                    this.opened++;
                    this.AddPendingToken(this.curToken);
                    break;
                case PythonLexer.RPAR:
                case PythonLexer.RSQB:
                case PythonLexer.RBRACE:
                    this.opened--;
                    this.AddPendingToken(this.curToken);
                    break;
                case PythonLexer.NEWLINE:
                    this.HandleNEWLINEtoken();
                    break;
                case PythonLexer.STRING:
                    this.HandleSTRINGtoken();
                    break;
                case PythonLexer.FSTRING_MIDDLE:
                    this.HandleFSTRING_MIDDLE_token();
                    break;
                case PythonLexer.ERROR_TOKEN:
                    this.ReportLexerError("token recognition error at: '" + this.curToken.Text + "'");
                    this.AddPendingToken(this.curToken);
                    break;
                case TokenConstants.Eof:
                    this.HandleEOFtoken();
                    break;
                default:
                    this.AddPendingToken(this.curToken);
                    break;
            }
            this.HandleFORMAT_SPECIFICATION_MODE();
        }
    }
    // Advances curToken/ffgToken, keeping a one-token lookahead; at EOF the
    // lookahead stays pinned to the EOF token.
    private void SetCurrentAndFollowingTokens()
    {
        this.curToken = this.ffgToken == null ?
            new CommonToken(base.NextToken()) :
            new CommonToken(this.ffgToken);
        this.HandleFStringLexerModes();
        this.ffgToken = this.curToken.Type == TokenConstants.Eof ?
            this.curToken :
            base.NextToken();
    }
    // initialize the _indentLengths
    // hide the leading NEWLINE token(s)
    // if exists, find the first statement (not NEWLINE, not EOF token) that comes from the default channel
    // insert a leading INDENT token if necessary
    private void HandleStartOfInput()
    {
        // initialize the stack with a default 0 indentation length
        this.indentLengthStack.Push(0); // this will never be popped off
        while (this.curToken.Type != TokenConstants.Eof)
        {
            if (this.curToken.Channel == TokenConstants.DefaultChannel)
            {
                if (this.curToken.Type == PythonLexer.NEWLINE)
                {
                    // all the NEWLINE tokens must be ignored before the first statement
                    this.HideAndAddPendingToken(this.curToken);
                }
                else
                { // We're at the first statement
                    this.InsertLeadingIndentToken();
                    return; // continue the processing of the current token with CheckNextToken()
                }
            }
            else
            {
                this.AddPendingToken(this.curToken); // it can be WS, EXPLICIT_LINE_JOINING, or COMMENT token
            }
            this.SetCurrentAndFollowingTokens();
        } // continue the processing of the EOF token with CheckNextToken()
    }
    private void InsertLeadingIndentToken()
    {
        if (this.previousPendingTokenType == PythonLexer.WS)
        {
            var prevToken = this.pendingTokens.Last.Value;
            if (this.GetIndentationLength(prevToken.Text) != 0) // there is an "indentation" before the first statement
            {
                const string errMsg = "first statement indented";
                this.ReportLexerError(errMsg);
                // insert an INDENT token before the first statement to raise an 'unexpected indent' error later by the parser
                this.CreateAndAddPendingToken(PythonLexer.INDENT, TokenConstants.DefaultChannel, PythonLexerBase.ERR_TXT + errMsg, this.curToken);
            }
        }
    }
    private void HandleNEWLINEtoken()
    {
        if (this.opened > 0)
        {
            // We're in an implicit line joining, ignore the current NEWLINE token
            this.HideAndAddPendingToken(this.curToken);
        }
        else
        {
            CommonToken nlToken = new CommonToken(this.curToken); // save the current NEWLINE token
            bool isLookingAhead = this.ffgToken.Type == PythonLexer.WS;
            if (isLookingAhead)
            {
                this.SetCurrentAndFollowingTokens(); // set the next two tokens
            }
            switch (this.ffgToken.Type)
            {
                case PythonLexer.NEWLINE: // We're before a blank line
                case PythonLexer.COMMENT: // We're before a comment
                case PythonLexer.TYPE_COMMENT: // We're before a type comment
                    this.HideAndAddPendingToken(nlToken);
                    if (isLookingAhead)
                    {
                        this.AddPendingToken(this.curToken); // WS token
                    }
                    break;
                default:
                    this.AddPendingToken(nlToken);
                    if (isLookingAhead)
                    { // We're on whitespace(s) followed by a statement
                        int indentationLength = this.ffgToken.Type == TokenConstants.Eof ?
                            0 :
                            this.GetIndentationLength(this.curToken.Text);
                        if (indentationLength != PythonLexerBase.INVALID_LENGTH)
                        {
                            this.AddPendingToken(this.curToken); // WS token
                            this.InsertIndentOrDedentToken(indentationLength); // may insert INDENT token or DEDENT token(s)
                        }
                        else
                        {
                            this.ReportError("inconsistent use of tabs and spaces in indentation");
                        }
                    }
                    else
                    {
                        // We're at a newline followed by a statement (there is no whitespace before the statement)
                        this.InsertIndentOrDedentToken(0); // may insert DEDENT token(s)
                    }
                    break;
            }
        }
    }
    private void InsertIndentOrDedentToken(int indentLength)
    {
        //*** https://docs.python.org/3/reference/lexical_analysis.html#indentation
        int prevIndentLength = this.indentLengthStack.Peek();
        if (indentLength > prevIndentLength)
        {
            this.CreateAndAddPendingToken(PythonLexer.INDENT, TokenConstants.DefaultChannel, null, this.ffgToken);
            this.indentLengthStack.Push(indentLength);
        }
        else
        {
            while (indentLength < prevIndentLength)
            { // more than 1 DEDENT token may be inserted into the token stream
                this.indentLengthStack.Pop();
                prevIndentLength = this.indentLengthStack.Peek();
                if (indentLength <= prevIndentLength)
                {
                    this.CreateAndAddPendingToken(PythonLexer.DEDENT, TokenConstants.DefaultChannel, null, this.ffgToken);
                }
                else
                {
                    this.ReportError("inconsistent dedent");
                }
            }
        }
    }
    private void HandleSTRINGtoken()
    {
        // remove the \<newline> escape sequences from the string literal
        // https://docs.python.org/3.11/reference/lexical_analysis.html#string-and-bytes-literals
        string line_joinFreeStringLiteral = Regex.Replace(this.curToken.Text, @"\\\r?\n", "");
        if (this.curToken.Text.Length == line_joinFreeStringLiteral.Length)
        {
            this.AddPendingToken(this.curToken);
        }
        else
        {
            CommonToken originalSTRINGtoken = new CommonToken(this.curToken); // backup the original token
            this.curToken.Text = line_joinFreeStringLiteral;
            this.AddPendingToken(this.curToken); // add the modified token with inline string literal
            this.HideAndAddPendingToken(originalSTRINGtoken); // add the original token with a hidden channel
            // this inserted hidden token allows to restore the original string literal with the \<newline> escape sequences
        }
    }
    private void HandleFSTRING_MIDDLE_token() // replace the double braces '{{' or '}}' to single braces and hide the second braces
    {
        string fsMid = this.curToken.Text;
        fsMid = fsMid.Replace("{{", "{_").Replace("}}", "}_"); // replace: {{ --> {_ and }} --> }_
        Regex regex = new Regex(@"(?<=[{}])_");
        string[] arrOfStr = regex.Split(fsMid); // split by {_ or }_
        foreach (string s in arrOfStr)
        {
            if (!String.IsNullOrEmpty(s))
            {
                this.CreateAndAddPendingToken(PythonLexer.FSTRING_MIDDLE, TokenConstants.DefaultChannel, s, this.ffgToken);
                string lastCharacter = s.Substring(s.Length - 1);
                if ("{}".Contains(lastCharacter))
                {
                    this.CreateAndAddPendingToken(PythonLexer.FSTRING_MIDDLE, TokenConstants.HiddenChannel, lastCharacter, this.ffgToken);
                    // this inserted hidden token allows to restore the original f-string literal with the double braces
                }
            }
        }
    }
    private void HandleFStringLexerModes() // https://peps.python.org/pep-0498/#specification
    {
        if (this._modeStack.Count > 0)
        {
            switch (this.curToken.Type)
            {
                case PythonLexer.LBRACE:
                    this.PushMode(PythonLexer.DefaultMode);
                    this.paren_or_bracket_openedStack.Push(0);
                    break;
                case PythonLexer.LPAR:
                case PythonLexer.LSQB:
                    // https://peps.python.org/pep-0498/#lambdas-inside-expressions
                    this.paren_or_bracket_openedStack.Push(this.paren_or_bracket_openedStack.Pop() + 1); // increment the last element
                    break;
                case PythonLexer.RPAR:
                case PythonLexer.RSQB:
                    this.paren_or_bracket_openedStack.Push(this.paren_or_bracket_openedStack.Pop() - 1); // decrement the last element
                    break;
                case PythonLexer.COLON: // colon can only come from DEFAULT_MODE
                    if (this.paren_or_bracket_openedStack.Peek() == 0)
                    {
                        switch (this._modeStack.First()) // check the previous lexer mode (the current is DEFAULT_MODE)
                        {
                            case PythonLexer.SINGLE_QUOTE_FSTRING_MODE:
                            case PythonLexer.LONG_SINGLE_QUOTE_FSTRING_MODE:
                            case PythonLexer.SINGLE_QUOTE_FORMAT_SPECIFICATION_MODE:
                                this.Mode(PythonLexer.SINGLE_QUOTE_FORMAT_SPECIFICATION_MODE); // continue in format spec. mode
                                break;
                            case PythonLexer.DOUBLE_QUOTE_FSTRING_MODE:
                            case PythonLexer.LONG_DOUBLE_QUOTE_FSTRING_MODE:
                            case PythonLexer.DOUBLE_QUOTE_FORMAT_SPECIFICATION_MODE:
                                this.Mode(PythonLexer.DOUBLE_QUOTE_FORMAT_SPECIFICATION_MODE); // continue in format spec. mode
                                break;
                        }
                    }
                    break;
                case PythonLexer.RBRACE:
                    switch (_mode)
                    {
                        case PythonLexer.DefaultMode:
                        case PythonLexer.SINGLE_QUOTE_FORMAT_SPECIFICATION_MODE:
                        case PythonLexer.DOUBLE_QUOTE_FORMAT_SPECIFICATION_MODE:
                            this.PopMode();
                            this.paren_or_bracket_openedStack.Pop();
                            break;
                        default:
                            this.ReportLexerError("f-string: single '}' is not allowed");
                            break;
                    }
                    break;
            }
        }
    }
    private void HandleFORMAT_SPECIFICATION_MODE()
    {
        if (this._modeStack.Count > 0 && this.ffgToken.Type == PythonLexer.RBRACE)
        {
            switch (this.curToken.Type)
            {
                case PythonLexer.COLON:
                case PythonLexer.RBRACE:
                    // insert an empty FSTRING_MIDDLE token instead of the missing format specification
                    this.CreateAndAddPendingToken(PythonLexer.FSTRING_MIDDLE, TokenConstants.DefaultChannel, "", this.ffgToken);
                    break;
            }
        }
    }
    private void InsertTrailingTokens()
    {
        switch (this.lastPendingTokenTypeFromDefaultChannel)
        {
            case PythonLexer.NEWLINE:
            case PythonLexer.DEDENT:
                break; // no trailing NEWLINE token is needed
            default:
                // insert an extra trailing NEWLINE token that serves as the end of the last statement
                this.CreateAndAddPendingToken(PythonLexer.NEWLINE, TokenConstants.DefaultChannel, null, this.ffgToken); // ffgToken is EOF
                break;
        }
        this.InsertIndentOrDedentToken(0); // Now insert as many trailing DEDENT tokens as needed
    }
    private void HandleEOFtoken()
    {
        if (this.lastPendingTokenTypeFromDefaultChannel > 0)
        { // there was a statement in the input (leading NEWLINE tokens are hidden)
            this.InsertTrailingTokens();
        }
        this.AddPendingToken(this.curToken);
    }
    // Moves the token to the hidden channel before queueing it.
    private void HideAndAddPendingToken(CommonToken cToken)
    {
        cToken.Channel = TokenConstants.HiddenChannel;
        this.AddPendingToken(cToken);
    }
    // Synthesizes a token (zero-width: its stop index precedes baseToken's start)
    // and queues it.
    private void CreateAndAddPendingToken(int type, int channel, string text, IToken baseToken)
    {
        CommonToken cToken = new CommonToken(baseToken);
        cToken.Type = type;
        cToken.Channel = channel;
        cToken.StopIndex = baseToken.StartIndex - 1;
        // cToken.Text = text == null
        // ? "<" + Vocabulary.GetSymbolicName(type) + ">"
        // : text;
        cToken.Text = text ?? string.Empty;
        this.AddPendingToken(cToken);
    }
    private void AddPendingToken(IToken token)
    {
        // save the last pending token type because the pendingTokens linked list can be empty by the nextToken()
        this.previousPendingTokenType = token.Type;
        if (token.Channel == TokenConstants.DefaultChannel)
        {
            this.lastPendingTokenTypeFromDefaultChannel = this.previousPendingTokenType;
        }
        this.pendingTokens.AddLast(token);
    }
    private int GetIndentationLength(string textWS) // the textWS may contain spaces, tabs or form feeds
    {
        const int TAB_LENGTH = 8; // the standard number of spaces to replace a tab with spaces
        int length = 0;
        foreach (char ch in textWS)
        {
            switch (ch)
            {
                case ' ':
                    this.wasSpaceIndentation = true;
                    length += 1;
                    break;
                case '\t':
                    this.wasTabIndentation = true;
                    length += TAB_LENGTH - (length % TAB_LENGTH);
                    break;
                case '\f': // form feed
                    length = 0;
                    break;
            }
        }
        if (this.wasTabIndentation && this.wasSpaceIndentation)
        {
            if (!this.wasIndentationMixedWithSpacesAndTabs)
            {
                this.wasIndentationMixedWithSpacesAndTabs = true;
                return PythonLexerBase.INVALID_LENGTH; // only for the first inconsistent indent
            }
        }
        return length;
    }
    // Reports a lexer error without emitting an ERROR_TOKEN.
    private void ReportLexerError(string errMsg)
    {
        // this.ErrorListenerDispatch.SyntaxError(this.ErrorOutput, this, this.curToken.Type, this.curToken.Line, this.curToken.Column, " LEXER" + PythonLexerBase.ERR_TXT + errMsg, null);
        this.ErrorListenerDispatch.SyntaxError( this, this.curToken.Type, this.curToken.Line, this.curToken.Column, " LEXER" + PythonLexerBase.ERR_TXT + errMsg, null);
    }
    // Reports a lexer error and queues an ERROR_TOKEN so the parser fails too.
    private void ReportError(string errMsg)
    {
        this.ReportLexerError(errMsg);
        // the ERROR_TOKEN will raise an error in the parser
        this.CreateAndAddPendingToken(PythonLexer.ERROR_TOKEN, TokenConstants.DefaultChannel, PythonLexerBase.ERR_TXT + errMsg, this.ffgToken);
    }
    // Restores the lexer to its initial state so the instance can be reused.
    public override void Reset()
    {
        this.Init();
        base.Reset();
    }
}

View File

@ -1,880 +0,0 @@
/*
Python grammar
The MIT License (MIT)
Copyright (c) 2021 Robert Einhorn
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
* Project : an ANTLR4 parser grammar by the official PEG grammar
* https://github.com/RobEin/ANTLR4-parser-for-Python-3.12
* Developed by : Robert Einhorn
*
*/
parser grammar PythonParser; // Python 3.12.1 https://docs.python.org/3.12/reference/grammar.html#full-grammar-specification
options {
tokenVocab=PythonLexer;
superClass=PythonParserBase;
}
// STARTING RULES
// ==============
file_input: statements? EOF;
interactive: statement_newline;
eval: expressions NEWLINE* EOF;
func_type: '(' type_expressions? ')' '->' expression NEWLINE* EOF;
fstring_input: star_expressions;
// GENERAL STATEMENTS
// ==================
statements: statement+;
statement: compound_stmt | simple_stmts;
statement_newline
: compound_stmt NEWLINE
| simple_stmts
| NEWLINE
| EOF;
simple_stmts
: simple_stmt (';' simple_stmt)* ';'? NEWLINE
;
// NOTE: assignment MUST precede expression, else parsing a simple assignment
// will throw a SyntaxError.
simple_stmt
: assignment
| type_alias
| star_expressions
| return_stmt
| import_stmt
| raise_stmt
| 'pass'
| del_stmt
| yield_stmt
| assert_stmt
| 'break'
| 'continue'
| global_stmt
| nonlocal_stmt;
compound_stmt
: function_def
| if_stmt
| class_def
| with_stmt
| for_stmt
| try_stmt
| while_stmt
| match_stmt;
// SIMPLE STATEMENTS
// =================
// NOTE: annotated_rhs may start with 'yield'; yield_expr must start with 'yield'
assignment
: NAME ':' expression ('=' annotated_rhs )?
| ('(' single_target ')'
| single_subscript_attribute_target) ':' expression ('=' annotated_rhs )?
| (star_targets '=' )+ (yield_expr | star_expressions) TYPE_COMMENT?
| single_target augassign (yield_expr | star_expressions);
annotated_rhs: yield_expr | star_expressions;
augassign
: '+='
| '-='
| '*='
| '@='
| '/='
| '%='
| '&='
| '|='
| '^='
| '<<='
| '>>='
| '**='
| '//=';
return_stmt
: 'return' star_expressions?;
raise_stmt
: 'raise' (expression ('from' expression )?)?
;
global_stmt: 'global' NAME (',' NAME)*;
nonlocal_stmt: 'nonlocal' NAME (',' NAME)*;
del_stmt
: 'del' del_targets;
yield_stmt: yield_expr;
assert_stmt: 'assert' expression (',' expression )?;
import_stmt
: import_name
| import_from;
// Import statements
// -----------------
import_name: 'import' dotted_as_names;
// note below: the ('.' | '...') is necessary because '...' is tokenized as ELLIPSIS
import_from
: 'from' ('.' | '...')* dotted_name 'import' import_from_targets
| 'from' ('.' | '...')+ 'import' import_from_targets;
import_from_targets
: '(' import_from_as_names ','? ')'
| import_from_as_names
| '*';
import_from_as_names
: import_from_as_name (',' import_from_as_name)*;
import_from_as_name
: NAME ('as' NAME )?;
dotted_as_names
: dotted_as_name (',' dotted_as_name)*;
dotted_as_name
: dotted_name ('as' NAME )?;
dotted_name
: dotted_name '.' NAME
| NAME;
// COMPOUND STATEMENTS
// ===================
// Common elements
// ---------------
// A suite: either an indented block of statements, or simple statements
// on the same logical line (e.g. 'if x: pass').
block
    : NEWLINE INDENT statements DEDENT
    | simple_stmts;
decorators: ('@' named_expression NEWLINE )+;
// Class definitions
// -----------------
class_def
    : decorators class_def_raw
    | class_def_raw;
class_def_raw
    : 'class' NAME type_params? ('(' arguments? ')' )? ':' block;
// Function definitions
// --------------------
function_def
    : decorators function_def_raw
    | function_def_raw;
// Second alternative is the 'async def' form.
function_def_raw
    : 'def' NAME type_params? '(' params? ')' ('->' expression )? ':' func_type_comment? block
    | ASYNC 'def' NAME type_params? '(' params? ')' ('->' expression )? ':' func_type_comment? block;
// Function parameters
// -------------------
params
    : parameters;
// Parameter ordering: positional-only (before '/'), regular,
// then '*' / keyword-only / '**' (star_etc).
parameters
    : slash_no_default param_no_default* param_with_default* star_etc?
    | slash_with_default param_with_default* star_etc?
    | param_no_default+ param_with_default* star_etc?
    | param_with_default+ star_etc?
    | star_etc;
// Some duplication here because we can't write (',' | {isCurrentTokenType(RPAR)}?),
// which is because we don't support empty alternatives (yet).
slash_no_default
    : param_no_default+ '/' ','?
    ;
slash_with_default
    : param_no_default* param_with_default+ '/' ','?
    ;
star_etc
    : '*' param_no_default param_maybe_default* kwds?
    | '*' param_no_default_star_annotation param_maybe_default* kwds?
    | '*' ',' param_maybe_default+ kwds?
    | kwds;
kwds
    : '**' param_no_default;
// One parameter. This *includes* a following comma and type comment.
//
// There are three styles:
// - No default_assignment
// - With default_assignment
// - Maybe with default_assignment
//
// There are two alternative forms of each, to deal with type comments:
// - Ends in a comma followed by an optional type comment
// - No comma, optional type comment, must be followed by close paren
// The latter form is for a final parameter without trailing comma.
//
param_no_default
    : param ','? TYPE_COMMENT?
    ;
param_no_default_star_annotation
    : param_star_annotation ','? TYPE_COMMENT?
    ;
param_with_default
    : param default_assignment ','? TYPE_COMMENT?
    ;
param_maybe_default
    : param default_assignment? ','? TYPE_COMMENT?
    ;
param: NAME annotation?;
param_star_annotation: NAME star_annotation;
annotation: ':' expression;
star_annotation: ':' star_expression;
default_assignment: '=' expression;
// If statement
// ------------
// 'elif' chains are handled by elif_stmt recursing on itself.
if_stmt
    : 'if' named_expression ':' block (elif_stmt | else_block?)
    ;
elif_stmt
    : 'elif' named_expression ':' block (elif_stmt | else_block?)
    ;
else_block
    : 'else' ':' block;
// While statement
// ---------------
while_stmt
    : 'while' named_expression ':' block else_block?;
// For statement
// -------------
for_stmt
    : ASYNC? 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?
    ;
// With statement
// --------------
// Parenthesized form allows a trailing comma; the bare form does not.
with_stmt
    : ASYNC? 'with' ( '(' with_item (',' with_item)* ','? ')' ':'
                    | with_item (',' with_item)* ':' TYPE_COMMENT?
                    ) block
    ;
with_item
    : expression ('as' star_target)?
    ;
// Try statement
// -------------
// Plain 'except' blocks and 'except*' blocks cannot be mixed in one try.
try_stmt
    : 'try' ':' block finally_block
    | 'try' ':' block except_block+ else_block? finally_block?
    | 'try' ':' block except_star_block+ else_block? finally_block?;
// Except statement
// ----------------
except_block
    : 'except' (expression ('as' NAME )?)? ':' block
    ;
except_star_block
    : 'except' '*' expression ('as' NAME )? ':' block;
finally_block
    : 'finally' ':' block;
// Match statement
// ---------------
// 'match' and 'case' are soft keywords, matched via the soft_kw_* predicate
// rules at the end of the grammar.
match_stmt
    : soft_kw_match subject_expr ':' NEWLINE INDENT case_block+ DEDENT;
subject_expr
    : star_named_expression ',' star_named_expressions?
    | named_expression;
case_block
    : soft_kw_case patterns guard? ':' block;
guard: 'if' named_expression;
patterns
    : open_sequence_pattern
    | pattern;
pattern
    : as_pattern
    | or_pattern;
as_pattern
    : or_pattern 'as' pattern_capture_target;
or_pattern
    : closed_pattern ('|' closed_pattern)*;
closed_pattern
    : literal_pattern
    | capture_pattern
    | wildcard_pattern
    | value_pattern
    | group_pattern
    | sequence_pattern
    | mapping_pattern
    | class_pattern;
// Literal patterns are used for equality and identity constraints
literal_pattern
    : signed_number
    | complex_number
    | strings
    | 'None'
    | 'True'
    | 'False';
// Literal expressions are used to restrict permitted mapping pattern keys
literal_expr
    : signed_number
    | complex_number
    | strings
    | 'None'
    | 'True'
    | 'False';
complex_number
    : signed_real_number ('+' | '-') imaginary_number
    ;
signed_number
    : '-'? NUMBER
    ;
signed_real_number
    : '-'? real_number
    ;
real_number
    : NUMBER;
imaginary_number
    : NUMBER;
// A bare NAME binds the subject, unless it is the wildcard '_'.
capture_pattern
    : pattern_capture_target;
pattern_capture_target
    : soft_kw__not__wildcard;
wildcard_pattern
    : soft_kw_wildcard;
value_pattern
    : attr;
// attr requires at least one '.'; name_or_attr also allows a bare NAME.
attr
    : NAME ('.' NAME)+
    ;
name_or_attr
    : NAME ('.' NAME)*
    ;
group_pattern
    : '(' pattern ')';
sequence_pattern
    : '[' maybe_sequence_pattern? ']'
    | '(' open_sequence_pattern? ')';
open_sequence_pattern
    : maybe_star_pattern ',' maybe_sequence_pattern?;
maybe_sequence_pattern
    : maybe_star_pattern (',' maybe_star_pattern)* ','?;
maybe_star_pattern
    : star_pattern
    | pattern;
star_pattern
    : '*' pattern_capture_target
    | '*' wildcard_pattern;
// Braces appear as LBRACE/RBRACE tokens here (as elsewhere in this grammar).
mapping_pattern
    : LBRACE RBRACE
    | LBRACE double_star_pattern ','? RBRACE
    | LBRACE items_pattern (',' double_star_pattern)? ','? RBRACE
    ;
items_pattern
    : key_value_pattern (',' key_value_pattern)*;
key_value_pattern
    : (literal_expr | attr) ':' pattern;
double_star_pattern
    : '**' pattern_capture_target;
// Positional patterns must precede keyword patterns in a class pattern.
class_pattern
    : name_or_attr '(' ((positional_patterns (',' keyword_patterns)? | keyword_patterns) ','?)? ')'
    ;
positional_patterns
    : pattern (',' pattern)*;
keyword_patterns
    : keyword_pattern (',' keyword_pattern)*;
keyword_pattern
    : NAME '=' pattern;
// Type statement
// ---------------
// 'type X = ...' alias statement; 'type' is a soft keyword (see soft_kw_type).
type_alias
    : soft_kw_type NAME type_params? '=' expression;
// Type parameter declaration
// --------------------------
type_params: '[' type_param_seq ']';
type_param_seq: type_param (',' type_param)* ','?;
// Plain TypeVar, '*' TypeVarTuple, or '**' ParamSpec.
type_param
    : NAME type_param_bound?
    | '*' NAME (':' expression)?
    | '**' NAME (':' expression)?
    ;
type_param_bound: ':' expression;
// EXPRESSIONS
// -----------
expressions
    : expression (',' expression )* ','?
    ;
// Conditional expression (a if cond else b) or lambda.
expression
    : disjunction ('if' disjunction 'else' expression)?
    | lambdef
    ;
yield_expr
    : 'yield' ('from' expression | star_expressions?)
    ;
star_expressions
    : star_expression (',' star_expression )* ','?
    ;
star_expression
    : '*' bitwise_or
    | expression;
star_named_expressions: star_named_expression (',' star_named_expression)* ','?;
star_named_expression
    : '*' bitwise_or
    | named_expression;
// Walrus operator: NAME := expression.
assignment_expression
    : NAME ':=' expression;
named_expression
    : assignment_expression
    | expression;
disjunction
    : conjunction ('or' conjunction )*
    ;
conjunction
    : inversion ('and' inversion )*
    ;
inversion
    : 'not' inversion
    | comparison;
// Comparison operators
// --------------------
// Chained comparisons (a < b < c) are a single bitwise_or followed by pairs.
comparison
    : bitwise_or compare_op_bitwise_or_pair*
    ;
compare_op_bitwise_or_pair
    : eq_bitwise_or
    | noteq_bitwise_or
    | lte_bitwise_or
    | lt_bitwise_or
    | gte_bitwise_or
    | gt_bitwise_or
    | notin_bitwise_or
    | in_bitwise_or
    | isnot_bitwise_or
    | is_bitwise_or;
eq_bitwise_or: '==' bitwise_or;
noteq_bitwise_or
    : ('!=' ) bitwise_or;
lte_bitwise_or: '<=' bitwise_or;
lt_bitwise_or: '<' bitwise_or;
gte_bitwise_or: '>=' bitwise_or;
gt_bitwise_or: '>' bitwise_or;
notin_bitwise_or: 'not' 'in' bitwise_or;
in_bitwise_or: 'in' bitwise_or;
isnot_bitwise_or: 'is' 'not' bitwise_or;
is_bitwise_or: 'is' bitwise_or;
// Bitwise operators
// -----------------
// Left recursion in these rules gives the binary operators left associativity.
bitwise_or
    : bitwise_or '|' bitwise_xor
    | bitwise_xor;
bitwise_xor
    : bitwise_xor '^' bitwise_and
    | bitwise_and;
bitwise_and
    : bitwise_and '&' shift_expr
    | shift_expr;
shift_expr
    : shift_expr ('<<' | '>>') sum
    | sum
    ;
// Arithmetic operators
// --------------------
sum
    : sum ('+' | '-') term
    | term
    ;
term
    : term ('*' | '/' | '//' | '%' | '@') factor
    | factor
    ;
// Unary operators.
factor
    : '+' factor
    | '-' factor
    | '~' factor
    | power;
// '**' is right-associative: its right operand is a factor, which can recurse
// back into power.
power
    : await_primary ('**' factor)?
    ;
// Primary elements
// ----------------
// Primary elements are things like "obj.something.something", "obj[something]", "obj(something)", "obj" ...
// Trailers: attribute access, generator-expression call, call, and subscript,
// all left-recursive on primary (obj.a.b, obj(x)[i], ...).
await_primary
    : AWAIT primary
    | primary;
primary
    : primary ('.' NAME | genexp | '(' arguments? ')' | '[' slices ']')
    | atom
    ;
slices
    : slice
    | (slice | starred_expression) (',' (slice | starred_expression))* ','?;
// a[start:stop:step] or a single (possibly named) expression index.
slice
    : expression? ':' expression? (':' expression? )?
    | named_expression;
atom
    : NAME
    | 'True'
    | 'False'
    | 'None'
    | strings
    | NUMBER
    | (tuple | group | genexp)
    | (list | listcomp)
    | (dict | set | dictcomp | setcomp)
    | '...';
group
    : '(' (yield_expr | named_expression) ')';
// Lambda functions
// ----------------
lambdef
    : 'lambda' lambda_params? ':' expression;
lambda_params
    : lambda_parameters;
// lambda_parameters etc. duplicates parameters but without annotations
// or type comments, and if there's no comma after a parameter, we expect
// a colon, not a close parenthesis. (For more, see parameters above.)
//
lambda_parameters
    : lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc?
    | lambda_slash_with_default lambda_param_with_default* lambda_star_etc?
    | lambda_param_no_default+ lambda_param_with_default* lambda_star_etc?
    | lambda_param_with_default+ lambda_star_etc?
    | lambda_star_etc;
lambda_slash_no_default
    : lambda_param_no_default+ '/' ','?
    ;
lambda_slash_with_default
    : lambda_param_no_default* lambda_param_with_default+ '/' ','?
    ;
lambda_star_etc
    : '*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds?
    | '*' ',' lambda_param_maybe_default+ lambda_kwds?
    | lambda_kwds;
lambda_kwds
    : '**' lambda_param_no_default;
lambda_param_no_default
    : lambda_param ','?
    ;
lambda_param_with_default
    : lambda_param default_assignment ','?
    ;
lambda_param_maybe_default
    : lambda_param default_assignment? ','?
    ;
lambda_param: NAME;
// LITERALS
// ========
// F-strings arrive tokenized as FSTRING_START / FSTRING_MIDDLE / FSTRING_END;
// replacement fields ({expr!conv:spec}) re-enter the expression grammar here.
fstring_middle
    : fstring_replacement_field
    | FSTRING_MIDDLE;
fstring_replacement_field
    : LBRACE (yield_expr | star_expressions) '='? fstring_conversion? fstring_full_format_spec? RBRACE;
fstring_conversion
    : '!' NAME;
fstring_full_format_spec
    : ':' fstring_format_spec*;
fstring_format_spec
    : FSTRING_MIDDLE
    | fstring_replacement_field;
fstring
    : FSTRING_START fstring_middle* FSTRING_END;
string: STRING;
// Adjacent string/f-string literals concatenate.
strings: (fstring|string)+;
list
    : '[' star_named_expressions? ']';
// A tuple literal needs at least one trailing comma after the first element.
tuple
    : '(' (star_named_expression ',' star_named_expressions? )? ')';
set: LBRACE star_named_expressions RBRACE;
// Dicts
// -----
dict
    : LBRACE double_starred_kvpairs? RBRACE;
double_starred_kvpairs: double_starred_kvpair (',' double_starred_kvpair)* ','?;
double_starred_kvpair
    : '**' bitwise_or
    | kvpair;
kvpair: expression ':' expression;
// Comprehensions & Generators
// ---------------------------
for_if_clauses
    : for_if_clause+;
for_if_clause
    : ASYNC? 'for' star_targets 'in' disjunction ('if' disjunction )*
    ;
listcomp
    : '[' named_expression for_if_clauses ']';
setcomp
    : LBRACE named_expression for_if_clauses RBRACE;
genexp
    : '(' ( assignment_expression | expression) for_if_clauses ')';
dictcomp
    : LBRACE kvpair for_if_clauses RBRACE;
// FUNCTION CALL ARGUMENTS
// =======================
arguments
    : args ','?;
// Positional arguments (including '*' unpacking) precede keyword arguments.
args
    : (starred_expression | ( assignment_expression | expression)) (',' (starred_expression | ( assignment_expression | expression)))* (',' kwargs )?
    | kwargs;
// Within kwargs, NAME=value and '*' unpacking may precede '**' unpacking.
kwargs
    : kwarg_or_starred (',' kwarg_or_starred)* (',' kwarg_or_double_starred (',' kwarg_or_double_starred)*)?
    | kwarg_or_double_starred (',' kwarg_or_double_starred)*
    ;
starred_expression
    : '*' expression;
kwarg_or_starred
    : NAME '=' expression
    | starred_expression;
kwarg_or_double_starred
    : NAME '=' expression
    | '**' expression;
// ASSIGNMENT TARGETS
// ==================
// Generic targets
// ---------------
// NOTE: star_targets may contain *bitwise_or, targets may not.
star_targets
    : star_target (',' star_target )* ','?
    ;
star_targets_list_seq: star_target (',' star_target)+ ','?;
// Tuple form: either a single target with a bare trailing comma, or 2+ targets.
star_targets_tuple_seq
    : star_target (',' | (',' star_target )+ ','?)
    ;
star_target
    : '*' (star_target)
    | target_with_star_atom;
target_with_star_atom
    : t_primary ('.' NAME | '[' slices ']')
    | star_atom
    ;
star_atom
    : NAME
    | '(' target_with_star_atom ')'
    | '(' star_targets_tuple_seq? ')'
    | '[' star_targets_list_seq? ']';
single_target
    : single_subscript_attribute_target
    | NAME
    | '(' single_target ')';
single_subscript_attribute_target
    : t_primary ('.' NAME | '[' slices ']')
    ;
// Like 'primary', but used on the target (left-hand) side.
t_primary
    : t_primary ('.' NAME | '[' slices ']' | genexp | '(' arguments? ')')
    | atom
    ;
// Targets for del statements
// --------------------------
del_targets: del_target (',' del_target)* ','?;
del_target
    : t_primary ('.' NAME | '[' slices ']')
    | del_t_atom
    ;
del_t_atom
    : NAME
    | '(' del_target ')'
    | '(' del_targets? ')'
    | '[' del_targets? ']';
// TYPING ELEMENTS
// ---------------
// type_expressions allow */** but ignore them
// type_expressions allow */** but ignore them (used by func_type_comment forms).
type_expressions
    : expression (',' expression)* (',' ('*' expression (',' '**' expression)? | '**' expression))?
    | '*' expression (',' '**' expression)?
    | '**' expression
    ;
func_type_comment
    : NEWLINE TYPE_COMMENT // Must be followed by indented block
    | TYPE_COMMENT;
// *** Soft Keywords: https://docs.python.org/3.12/reference/lexical_analysis.html#soft-keywords
// Each rule matches a plain NAME token only when its text equals the keyword,
// via a semantic predicate implemented in PythonParserBase.
soft_kw_type: {this.isEqualToCurrentTokenText("type")}? NAME;
soft_kw_match: {this.isEqualToCurrentTokenText("match")}? NAME;
soft_kw_case: {this.isEqualToCurrentTokenText("case")}? NAME;
soft_kw_wildcard: {this.isEqualToCurrentTokenText("_")}? NAME;
soft_kw__not__wildcard: {this.isnotEqualToCurrentTokenText("_")}? NAME;
// ========================= END OF THE GRAMMAR ===========================

View File

@ -1,21 +0,0 @@
using Antlr4.Runtime;
namespace MycroForge.Parsing;
/// <summary>
/// Base class for the ANTLR-generated Python parser. Supplies the semantic
/// predicates that the grammar's soft-keyword rules (soft_kw_type, soft_kw_match,
/// soft_kw_case, soft_kw_wildcard, soft_kw__not__wildcard) invoke.
/// NOTE: the lower-case method names deviate from C# conventions but are
/// referenced verbatim from the grammar's predicate actions and must not be renamed.
/// </summary>
public abstract class PythonParserBase : Parser
{
    protected PythonParserBase(ITokenStream input) : base(input)
    {
    }

    // https://docs.python.org/3/reference/lexical_analysis.html#soft-keywords
    /// <summary>
    /// Returns true when the current lookahead token's text equals
    /// <paramref name="tokenText"/>.
    /// </summary>
    public bool isEqualToCurrentTokenText(string tokenText)
    {
        return this.CurrentToken.Text == tokenText;
    }

    /// <summary>Negation of <see cref="isEqualToCurrentTokenText"/>.</summary>
    public bool isnotEqualToCurrentTokenText(string tokenText)
    {
        return !this.isEqualToCurrentTokenText(tokenText); // for compatibility with the Python 'not' logical operator
    }
}

View File

@ -2,8 +2,6 @@
Microsoft Visual Studio Solution File, Format Version 12.00 Microsoft Visual Studio Solution File, Format Version 12.00
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MycroForge.CLI", "MycroForge.CLI\MycroForge.CLI.csproj", "{27EFB015-AFC3-4046-8D9A-DD5C5D3B35E0}" Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MycroForge.CLI", "MycroForge.CLI\MycroForge.CLI.csproj", "{27EFB015-AFC3-4046-8D9A-DD5C5D3B35E0}"
EndProject EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MycroForge.Parsing", "MycroForge.Parsing\MycroForge.Parsing.csproj", "{D697CEFD-7CF7-4680-82FC-F84B08F81635}"
EndProject
Global Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU Debug|Any CPU = Debug|Any CPU
@ -14,9 +12,5 @@ Global
{27EFB015-AFC3-4046-8D9A-DD5C5D3B35E0}.Debug|Any CPU.Build.0 = Debug|Any CPU {27EFB015-AFC3-4046-8D9A-DD5C5D3B35E0}.Debug|Any CPU.Build.0 = Debug|Any CPU
{27EFB015-AFC3-4046-8D9A-DD5C5D3B35E0}.Release|Any CPU.ActiveCfg = Release|Any CPU {27EFB015-AFC3-4046-8D9A-DD5C5D3B35E0}.Release|Any CPU.ActiveCfg = Release|Any CPU
{27EFB015-AFC3-4046-8D9A-DD5C5D3B35E0}.Release|Any CPU.Build.0 = Release|Any CPU {27EFB015-AFC3-4046-8D9A-DD5C5D3B35E0}.Release|Any CPU.Build.0 = Release|Any CPU
{D697CEFD-7CF7-4680-82FC-F84B08F81635}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D697CEFD-7CF7-4680-82FC-F84B08F81635}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D697CEFD-7CF7-4680-82FC-F84B08F81635}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D697CEFD-7CF7-4680-82FC-F84B08F81635}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection EndGlobalSection
EndGlobal EndGlobal