Initial commit

This commit is contained in:
mdnapo 2024-04-20 16:47:13 +02:00
commit 0a4c5ebb8d
44 changed files with 4236 additions and 0 deletions

34
.dockerignore Normal file
View File

@ -0,0 +1,34 @@
# Include any files or directories that you don't want to be copied to your
# container here (e.g., local build artifacts, temporary files, etc.).
#
# For more help, visit the .dockerignore file reference guide at
# https://docs.docker.com/go/build-context-dockerignore/
**/.DS_Store
**/.classpath
**/.dockerignore
**/.env
**/.git
**/.gitignore
**/.project
**/.settings
**/.toolstarget
**/.vs
**/.vscode
**/*.*proj.user
**/*.dbmdl
**/*.jfm
**/bin
**/charts
**/docker-compose*
**/compose*
**/Dockerfile*
**/node_modules
**/npm-debug.log
**/obj
**/secrets.dev.yaml
**/values.dev.yaml
LICENSE
README.md
**/venv

43
.gitignore vendored Normal file
View File

@ -0,0 +1,43 @@
*.swp
*.*~
project.lock.json
.DS_Store
*.pyc
nupkg/
# Visual Studio Code
.vscode/
# Rider
.idea/
# Visual Studio
.vs/
# Fleet
.fleet/
# Code Rush
.cr/
# User-specific files
*.suo
*.user
*.userosscache
*.sln.docstates
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
build/
bld/
[Bb]in/
[Oo]bj/
[Oo]ut/
msbuild.log
msbuild.err
msbuild.wrn

32
Dockerfile Normal file
View File

@ -0,0 +1,32 @@
FROM --platform=$BUILDPLATFORM mcr.microsoft.com/dotnet/sdk:8.0-alpine AS build
COPY . /source
WORKDIR /source/MicroForge.CLI
ARG TARGETARCH
# Leverage a cache mount to /root/.nuget/packages so that subsequent builds don't have to re-download packages.
# If TARGETARCH is "amd64", replace it with "x64" - "x64" is .NET's canonical name for this and "amd64" doesn't
# work in .NET 6.0.
RUN --mount=type=cache,id=nuget,target=/root/.nuget/packages \
    dotnet publish -a ${TARGETARCH/amd64/x64} --use-current-runtime --self-contained false -o /app
FROM mcr.microsoft.com/dotnet/aspnet:8.0-bookworm-slim AS final
# git, bash and a Python 3 toolchain are runtime dependencies: the CLI shells out
# to bash/git and manages Python virtual environments for the projects it scaffolds.
RUN apt update -y && \
    apt upgrade -y && \
    apt install -y git bash python3 python3-pip python3-venv
# The Docker approach doesn't work for now, because the venv setup depends on absolute paths.
# This means that we would need to recreate the full path to the actual working directory in the Docker container,
# which should be pretty doable, but it's a concern for later.
ENV PYTHONUNBUFFERED=1
WORKDIR /app
COPY --from=build /app .
WORKDIR /project
COPY MicroForge.CLI/scripts /scripts
USER root
# ENTRYPOINT and CMD must be separate instructions. The original had them on one
# line, which the Dockerfile parser treats as a single shell-form ENTRYPOINT and
# silently breaks the default "-?" argument.
ENTRYPOINT ["dotnet", "/app/MicroForge.CLI.dll"]
CMD ["-?"]

View File

@ -0,0 +1,6 @@
namespace MicroForge.CLI;
/// <summary>
/// Carries the raw command-line arguments through the DI container so services
/// (e.g. ProjectContext) can inspect how the tool was invoked.
/// </summary>
public class ArgsContext
{
    // Defaults to an empty array so the property is never null even when the
    // initializer is omitted (the original left a non-nullable property unset).
    public string[] Args { get; init; } = [];
}

46
MicroForge.CLI/Bash.cs Normal file
View File

@ -0,0 +1,46 @@
using System.Diagnostics;
using System.Text;
using MicroForge.CLI.Exceptions;
namespace MicroForge.CLI;
/// <summary>
/// Helper for running a series of shell commands through a single bash process.
/// </summary>
public static class Bash
{
    /// <summary>
    /// Starts a bash process, feeds it <paramref name="script"/> line by line via
    /// stdin, prints the combined stdout/stderr, and waits for exit. A non-zero
    /// exit code is reported on the console rather than thrown, so a failing step
    /// does not abort the CLI.
    /// </summary>
    /// <param name="script">The shell commands to execute, in order.</param>
    /// <exception cref="InvalidOperationException">The bash process could not be started.</exception>
    public static async Task ExecuteAsync(params string[] script)
    {
        var info = new ProcessStartInfo
        {
            FileName = "bash",
            UseShellExecute = false,
            CreateNoWindow = true,
            RedirectStandardInput = true,
            RedirectStandardOutput = true,
            RedirectStandardError = true,
        };

        using var process = Process.Start(info);
        // Never throw NullReferenceException manually (CA2201); InvalidOperationException
        // is the idiomatic type for "the operation could not be performed".
        if (process is null)
            throw new InvalidOperationException("Could not initialize bash process.");

        await using var input = process.StandardInput;
        foreach (var line in script)
            await input.WriteLineAsync(line);
        await input.FlushAsync();
        input.Close();

        // NOTE(review): draining stdout fully before stderr can deadlock if bash
        // fills the stderr pipe first — consider reading both concurrently if
        // large error output ever shows up.
        var sb = new StringBuilder();
        sb.Append(await process.StandardOutput.ReadToEndAsync());
        sb.Append(await process.StandardError.ReadToEndAsync());
        Console.WriteLine(sb.ToString());

        await process.WaitForExitAsync();
        if (process.ExitCode != 0)
            Console.WriteLine($"Process exited with status code {process.ExitCode}.");
    }
}

View File

@ -0,0 +1,52 @@
using MicroForge.Parsing;
namespace MicroForge.CLI.CodeGen;
/// <summary>
/// Rewrites Alembic's generated env.py so it pulls its connection string and
/// target metadata from the scaffolded "orm" package instead of alembic.ini.
/// </summary>
public class OrmEnvInitializer : PythonSourceModifier
{
    public OrmEnvInitializer(string source) : base(source)
    {
    }

    /// <summary>
    /// After the "from alembic import context" import, adds imports for the
    /// project's settings and entity base classes.
    /// </summary>
    public override object? VisitImport_from(PythonParser.Import_fromContext context)
    {
        var text = GetOriginalText(context);
        if (text != "from alembic import context") return null;

        Rewrite(context,
            text,
            "from orm.settings import OrmSettings",
            "from orm.entities.entity_base import EntityBase"
        );

        return base.VisitImport_from(context);
    }

    /// <summary>
    /// Points Alembic's target metadata and database url at the generated ORM code.
    /// </summary>
    public override object? VisitAssignment(PythonParser.AssignmentContext context)
    {
        // The stray debug Console.WriteLine(text) that printed every assignment
        // during rewriting has been removed.
        var text = GetOriginalText(context);

        if (text == "target_metadata = None")
        {
            Rewrite(context, "target_metadata = EntityBase.metadata");
        }
        else if (text == "url = config.get_main_option(\"sqlalchemy.url\")")
        {
            Rewrite(context, "url = OrmSettings.get_connectionstring()");
        }
        else if (text.StartsWith("connectable ="))
        {
            // Important note, the indent here is 4 spaces and not tab(s).
            const string indent = "    ";
            Rewrite(context, [
                "url = OrmSettings.get_connectionstring()",
                $"{indent}context.config.set_main_option('sqlalchemy.url', url)",
                $"{indent}{text}"
            ]);
        }

        return base.VisitAssignment(context);
    }
}

View File

@ -0,0 +1,29 @@
using MicroForge.Parsing;
namespace MicroForge.CLI.CodeGen;
/// <summary>
/// Adds an import for a newly generated entity module to the ORM's env.py,
/// immediately after the EntityBase import so Alembic autogenerate can see it.
/// </summary>
public class OrmEnvUpdater : PythonSourceModifier
{
    // The import line that acts as the insertion anchor.
    private const string Anchor = "from orm.entities.entity_base import EntityBase";

    private readonly string _moduleName;
    private readonly string _className;

    public OrmEnvUpdater(string source, string moduleName, string className) : base(source)
    {
        _moduleName = moduleName;
        _className = className;
    }

    public override object? VisitImport_from(PythonParser.Import_fromContext context)
    {
        var text = GetOriginalText(context);

        // Every other import is left untouched.
        if (text != Anchor) return null;

        Rewrite(context, text, $"from orm.entities.{_moduleName} import {_className}");
        return base.VisitImport_from(context);
    }
}

View File

@ -0,0 +1,39 @@
using Antlr4.Runtime;
using MicroForge.Parsing;
namespace MicroForge.CLI.CodeGen;
/// <summary>
/// Base class for ANTLR-based rewriters of Python source. Subclasses visit parse
/// tree nodes and call <see cref="Rewrite(ParserRuleContext, string[])"/> to replace
/// their text; untouched tokens pass through unmodified.
/// </summary>
public abstract class PythonSourceModifier : PythonParserBaseVisitor<object?>
{
    private CommonTokenStream Stream { get; }
    private PythonParser Parser { get; }
    private TokenStreamRewriter Rewriter { get; }

    protected PythonSourceModifier(string source)
    {
        var lexer = new PythonLexer(new AntlrInputStream(source));
        Stream = new CommonTokenStream(lexer);
        Parser = new PythonParser(Stream);
        Rewriter = new TokenStreamRewriter(Stream);
    }

    /// <summary>Parses the source, runs the subclass's visitor, and returns the rewritten text.</summary>
    public string Rewrite()
    {
        Visit(Parser.file_input());
        return Rewriter.GetText();
    }

    /// <summary>
    /// Returns the unmodified text of a rule context. The rewriter's token stream
    /// is used because the parse tree does not necessarily preserve the original source.
    /// </summary>
    protected string GetOriginalText(ParserRuleContext context)
        => Rewriter.TokenStream.GetText(context);

    /// <summary>Replaces the context's tokens with the given lines, joined by '\n'.</summary>
    protected void Rewrite(ParserRuleContext context, params string[] text)
    {
        Rewriter.Replace(from: context.start, to: context.Stop, text: string.Join('\n', text));
    }
}

View File

@ -0,0 +1,7 @@
using System.CommandLine;
namespace MicroForge.CLI.Commands.Interfaces;
/// <summary>
/// Marker interface tagging a command as a sub-command of <typeparamref name="T"/>.
/// The DI container groups registrations by this interface so a parent command can
/// receive all of its children as an IEnumerable of ISubCommandOf of the parent.
/// </summary>
/// <typeparam name="T">The parent command this command is attached to.</typeparam>
public interface ISubCommandOf<T> where T : Command
{
}

View File

@ -0,0 +1,21 @@
using System.CommandLine;
using MicroForge.CLI.Commands.Interfaces;
using MicroForge.CLI.Features;
namespace MicroForge.CLI.Commands;
public partial class MicroForge
{
    public partial class Add
    {
        /// <summary>
        /// "m4g add api" — runs the <see cref="Features.Api"/> feature, which installs
        /// FastAPI into the current project.
        /// </summary>
        public class Api : Command, ISubCommandOf<Add>
        {
            public Api(ProjectContext context, IEnumerable<IFeature> features) :
                base("api", "Add FastAPI to your project")
            {
                // Fail with a descriptive message instead of First()'s bare
                // "Sequence contains no matching element" when the feature is missing.
                var feature = features.FirstOrDefault(f => f.Name == Features.Api.FeatureName)
                              ?? throw new InvalidOperationException(
                                  $"Feature '{Features.Api.FeatureName}' is not registered.");
                this.SetHandler(async () => await feature.ExecuteAsync(context));
            }
        }
    }
}

View File

@ -0,0 +1,26 @@
using System.CommandLine;
using MicroForge.CLI.Commands.Interfaces;
using MicroForge.CLI.Features;
namespace MicroForge.CLI.Commands;
public partial class MicroForge
{
    public partial class Add
    {
        /// <summary>
        /// "m4g add orm" — runs the <see cref="Features.Orm"/> feature, which installs
        /// SQLAlchemy and Alembic into the current project.
        /// </summary>
        public class Orm : Command, ISubCommandOf<Add>
        {
            public Orm(ProjectContext context, IEnumerable<IFeature> features) :
                base("orm", "Add SQLAlchemy to your project")
            {
                // Fail with a descriptive message instead of First()'s bare
                // "Sequence contains no matching element" when the feature is missing.
                var feature = features.FirstOrDefault(f => f.Name == Features.Orm.FeatureName)
                              ?? throw new InvalidOperationException(
                                  $"Feature '{Features.Orm.FeatureName}' is not registered.");
                this.SetHandler(async () => await feature.ExecuteAsync(context));
            }

            // NOTE(review): this nested class is empty and never registered — presumably
            // a placeholder for a future sub-command; confirm and delete if dead.
            public class Generate
            {
            }
        }
    }
}

View File

@ -0,0 +1,33 @@
using System.CommandLine;
using MicroForge.CLI.Commands.Interfaces;
namespace MicroForge.CLI.Commands;
public partial class MicroForge
{
    /// <summary>
    /// "m4g add" — parent command grouping the feature sub-commands. The 'new'
    /// modifier hides an inherited member named Add on the base Command type.
    /// </summary>
    public new partial class Add : Command, ISubCommandOf<MicroForge>
    {
        public Add(IEnumerable<ISubCommandOf<Add>> subCommands) :
            base("add", "Add a predefined feature to your project")
        {
            // Attach every registered child command, in registration order.
            foreach (var child in subCommands)
                AddCommand((child as Command)!);
        }
    }

    /// <summary>
    /// "m4g run" — starts the project's app with uvicorn inside its venv.
    /// </summary>
    public class Run : Command, ISubCommandOf<MicroForge>
    {
        public Run() : base("run", "Run your app")
        {
            this.SetHandler(ExecuteAsync);
        }

        private async Task ExecuteAsync()
        {
            await Bash.ExecuteAsync(
                "source .venv/bin/activate",
                "uvicorn main:app --reload"
            );
        }
    }
}

View File

@ -0,0 +1,56 @@
using System.CommandLine;
using Humanizer;
using MicroForge.CLI.CodeGen;
using MicroForge.CLI.Commands.Interfaces;
namespace MicroForge.CLI.Commands;
public partial class MicroForge
{
    public partial class Generate
    {
        /// <summary>
        /// "m4g generate entity" (alias "e") — writes a new SQLAlchemy entity module
        /// from a template and registers its import in orm/env.py so Alembic
        /// autogenerate can see it.
        /// </summary>
        public class Entity : Command, ISubCommandOf<Generate>
        {
            // %class_name% / %table_name% are substituted per invocation.
            private static readonly string[] Template =
            [
                "from sqlalchemy import INTEGER, Column, String",
                "from orm.entities.entity_base import EntityBase",
                "",
                "class %class_name%(EntityBase):",
                "\t__tablename__ = \"%table_name%\"",
                "\tid = Column(INTEGER, primary_key=True)",
                "",
                "\tdef __repr__(self) -> str:",
                "\t\treturn f\"%class_name%(id={self.id!r})\""
            ];

            private static readonly Argument<string> NameArgument =
                new(name: "name", description: "The name of the orm entity");

            private readonly ProjectContext _context;

            // Description typo fixed ("Generate and orm entity").
            public Entity(ProjectContext context) : base("entity", "Generate an orm entity")
            {
                _context = context;
                AddAlias("e");
                AddArgument(NameArgument);
                this.SetHandler(ExecuteAsync, NameArgument);
            }

            /// <summary>
            /// Renders the template for <paramref name="name"/>, writes the module under
            /// orm/entities/, and appends the entity import to orm/env.py.
            /// </summary>
            private async Task ExecuteAsync(string name)
            {
                var className = name.Underscore().Pascalize();
                var moduleName = name.Underscore();

                var code = string.Join('\n', Template)
                    .Replace("%class_name%", className)
                    .Replace("%table_name%", name.ToLower().Underscore());

                await _context.CreateFile($"orm/entities/{moduleName}.py", code);

                var env = await _context.ReadFile("orm/env.py");
                env = new OrmEnvUpdater(env, moduleName, className).Rewrite();
                await _context.WriteFile("orm/env.py", env);
            }
        }
    }
}

View File

@ -0,0 +1,32 @@
using System.CommandLine;
using MicroForge.CLI.Commands.Interfaces;
namespace MicroForge.CLI.Commands;
public partial class MicroForge
{
    public partial class Generate
    {
        /// <summary>
        /// "m4g generate migration" (alias "m") — asks Alembic to autogenerate a
        /// revision, using a UTC timestamp as the revision id so files sort
        /// chronologically.
        /// </summary>
        public class Migration : Command, ISubCommandOf<Generate>
        {
            private static readonly Argument<string> NameArgument =
                new(name: "name", description: "The name of the migration");

            public Migration() : base("migration", "Generate a migration")
            {
                AddAlias("m");
                AddArgument(NameArgument);
                this.SetHandler(ExecuteAsync, NameArgument);
            }

            private static async Task ExecuteAsync(string name)
            {
                string[] script =
                [
                    "source .venv/bin/activate",
                    $"alembic revision --autogenerate -m \"{name}\" --rev-id $(date -u +\"%Y%m%d%H%M%S\")"
                ];
                await Bash.ExecuteAsync(script);
            }
        }
    }
}

View File

@ -0,0 +1,53 @@
using System.CommandLine;
using Humanizer;
using MicroForge.CLI.Commands.Interfaces;
namespace MicroForge.CLI.Commands;
public partial class MicroForge
{
    public partial class Generate
    {
        /// <summary>
        /// "m4g generate router" (alias "r") — writes a FastAPI router module from a
        /// template and wires it into main.py under a kebab-cased route prefix.
        /// </summary>
        public class Router : Command, ISubCommandOf<Generate>
        {
            private static readonly string[] Template =
            [
                "from fastapi import APIRouter",
                "from fastapi.responses import JSONResponse",
                "from fastapi.encoders import jsonable_encoder",
                "",
                "router = APIRouter()",
                "",
                "@router.get(\"/{name}\")",
                "async def index(name: str):",
                "\treturn JSONResponse(status_code=200, content=jsonable_encoder({'greeting': f\"Hello, {name}!\"}))"
            ];

            private static readonly Argument<string> NameArgument =
                new(name: "name", description: "The name of the api router");

            private readonly ProjectContext _context;

            public Router(ProjectContext context) : base("router", "Generate an api router")
            {
                _context = context;
                AddAlias("r");
                AddArgument(NameArgument);
                this.SetHandler(ExecuteAsync, NameArgument);
            }

            private async Task ExecuteAsync(string name)
            {
                var moduleName = name.Underscore();
                await _context.CreateFile($"api/routers/{moduleName}.py", Template);

                // Register the new router in main.py: import it, then include it
                // under a prefix derived from the given name.
                var registration = string.Join('\n',
                    $"\nfrom api.routers import {moduleName}",
                    $"app.include_router(prefix=\"/{name.Kebaberize()}\", router={moduleName}.router)\n"
                );
                var main = await _context.ReadFile("main.py");
                await _context.WriteFile("main.py", main + registration);
            }
        }
    }
}

View File

@ -0,0 +1,18 @@
using System.CommandLine;
using MicroForge.CLI.Commands.Interfaces;
namespace MicroForge.CLI.Commands;
public partial class MicroForge
{
    /// <summary>
    /// "m4g generate" (alias "g") — parent command grouping the item generators
    /// (entity, router, migration) supplied through DI.
    /// </summary>
    public partial class Generate : Command, ISubCommandOf<MicroForge>
    {
        public Generate(IEnumerable<ISubCommandOf<Generate>> subCommands) :
            base("generate", "Generate a project item")
        {
            AddAlias("g");
            // Attach every registered child command, in registration order.
            foreach (var child in subCommands)
                AddCommand((child as Command)!);
        }
    }
}

View File

@ -0,0 +1,159 @@
using System.CommandLine;
using MicroForge.CLI.Commands.Interfaces;
using MicroForge.CLI.Features;
using Microsoft.Extensions.DependencyInjection;
namespace MicroForge.CLI.Commands;
public partial class MicroForge
{
    /// <summary>
    /// "m4g init" — scaffolds a new project: directory, m4g.json config, entrypoint
    /// file, .gitignore, a Python venv, a git repository, and any requested features.
    /// </summary>
    public class Init : Command, ISubCommandOf<MicroForge>
    {
        #region GitIgnore
        // Standard Python .gitignore, stored one line per element.
        private static readonly string[] GitIgnore =
        [
            "# Byte-compiled / optimized / DLL files", "__pycache__/", "*.py[cod]", "*$py.class", "# C extensions",
            "*.so", "# Distribution / packaging", ".Python", "build/", "develop-eggs/", "dist/", "downloads/", "eggs/",
            ".eggs/", "lib/", "lib64/", "parts/", "sdist/", "var/", "wheels/", "share/python-wheels/", "*.egg-info/",
            ".installed.cfg", "*.egg", "MANIFEST", "# PyInstaller",
            "# Usually these files are written by a python script from a template",
            "# before PyInstaller builds the exe, so as to inject date/other infos into it.", "*.manifest", "*.spec",
            "# Installer logs", "pip-log.txt", "pip-delete-this-directory.txt", "# Unit test / coverage reports",
            "htmlcov/", ".tox/", ".nox/", ".coverage", ".coverage.*", ".cache", "nosetests.xml", "coverage.xml",
            "*.cover", "*.py,cover", ".hypothesis/", ".pytest_cache/", "cover/", "# Translations", "*.mo", "*.pot",
            "# Django stuff:", "*.log", "local_settings.py", "db.sqlite3", "db.sqlite3-journal", "# Flask stuff:",
            "instance/", ".webassets-cache", "# Scrapy stuff:", ".scrapy", "# Sphinx documentation", "docs/_build/",
            "# PyBuilder", ".pybuilder/", "target/", "# Jupyter Notebook", ".ipynb_checkpoints", "# IPython",
            "profile_default/", "ipython_config.py", "# pyenv",
            "# For a library or package, you might want to ignore these files since the code is",
            "# intended to run in multiple environments; otherwise, check them in:", "# .python-version", "# pipenv",
            "# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.",
            "# However, in case of collaboration, if having platform-specific dependencies or dependencies",
            "# having no cross-platform support, pipenv may install dependencies that don't work, or not",
            "# install all needed dependencies.", "#Pipfile.lock", "# poetry",
            "# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.",
            "# This is especially recommended for binary packages to ensure reproducibility, and is more",
            "# commonly ignored for libraries.",
            "# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control",
            "#poetry.lock", "# pdm",
            "# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.",
            "#pdm.lock",
            "# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it",
            "# in version control.", "# https://pdm.fming.dev/#use-with-ide", ".pdm.toml",
            "# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm", "__pypackages__/",
            "# Celery stuff", "celerybeat-schedule", "celerybeat.pid", "# SageMath parsed files", "*.sage.py",
            "# Environments", ".env", ".venv", "env/", "venv/", "ENV/", "env.bak/", "venv.bak/",
            "# Spyder project settings", ".spyderproject", ".spyproject", "# Rope project settings", ".ropeproject",
            "# mkdocs documentation", "/site", "# mypy", ".mypy_cache/", ".dmypy.json", "dmypy.json",
            "# Pyre type checker", ".pyre/", "# pytype static type analyzer", ".pytype/", "# Cython debug symbols",
            "cython_debug/", "# PyCharm",
            "# JetBrains specific template is maintained in a separate JetBrains.gitignore that can",
            "# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore",
            "# and can be added to the global gitignore or merged into this file. For a more nuclear",
            "# option (not recommended) you can uncomment the following to ignore the entire idea folder.", "#.idea/"
        ];
        #endregion

        private static readonly Argument<string> NameArgument =
            new(name: "name", description: "The name of your project");

        private static readonly Option<string> EntryPoint =
            new(name: "--entrypoint", description: "The name of the entrypoint file");

        private static readonly Option<string> BranchOption =
            new(name: "--branch", description: "The name of the initial git branch");

        private static readonly Option<IEnumerable<string>> FeaturesOption =
            new(name: "--features", description: "The features to include")
            {
                AllowMultipleArgumentsPerToken = true
            };

        private readonly IServiceProvider _services;
        private readonly List<IFeature> _features;

        public Init(IServiceProvider services) : base("init", "Initialize a new project")
        {
            AddArgument(NameArgument);
            AddOption(EntryPoint);
            AddOption(BranchOption);
            AddOption(FeaturesOption);
            this.SetHandler(ExecuteAsync, NameArgument, EntryPoint, BranchOption, FeaturesOption);
            _services = services;
            _features = _services.GetServices<IFeature>().ToList();
        }

        private async Task ExecuteAsync(string name, string entrypoint, string branch, IEnumerable<string> features)
        {
            var featuresList = features.ToList();
            Validate(featuresList);
            await Initialize(name, entrypoint, branch, featuresList);
        }

        /// <summary>Rejects any requested feature that has no matching registration.</summary>
        private void Validate(List<string> features)
        {
            foreach (var feature in features)
                if (_features.All(f => f.Name != feature))
                    // InvalidOperationException instead of the bare Exception the original threw.
                    throw new InvalidOperationException($"Feature {feature} was not found.");
        }

        private async Task Initialize(string name, string entrypoint, string branch, List<string> features)
        {
            // Create the project directory and change the directory for the ProjectContext
            var projectRoot = await CreateDirectory(name);
            var ctx = _services.GetRequiredService<ProjectContext>();
            ctx.ChangeDirectory(projectRoot);

            // Create the config file and initialize the config (force: LoadConfig
            // normally skips loading for "init" invocations)
            await ctx.CreateFile("m4g.json", "{}");
            await ctx.LoadConfig(force: true);

            // Create the entrypoint file
            entrypoint = string.IsNullOrEmpty(entrypoint) ? "main.py" : entrypoint;
            await ctx.CreateFile(entrypoint, string.Empty);
            ctx.Config.Entrypoint = entrypoint;

            // Create the default .gitignore
            await ctx.CreateFile(".gitignore", GitIgnore);

            // Create the venv
            await Bash.ExecuteAsync($"python3 -m venv {Path.Combine(projectRoot, ".venv")}");

            // Initialize git
            var branchName = string.IsNullOrEmpty(branch) ? "main" : branch;
            await Bash.ExecuteAsync($"git -c init.defaultBranch={branchName} init {projectRoot}");

            // Initialize features
            if (features.Count > 0)
                await InitializeFeatures(ctx, features);

            Console.WriteLine($"Directory {projectRoot} was successfully initialized");
        }

        private async Task<string> CreateDirectory(string name)
        {
            var directory = Path.Combine(Directory.GetCurrentDirectory(), name);
            if (Directory.Exists(directory))
                throw new InvalidOperationException($"Directory {directory} already exists.");

            Console.WriteLine($"Creating directory {directory}");
            Directory.CreateDirectory(directory);
            // NOTE(review): 777 grants world write access — consider 755. Also the path
            // is interpolated unquoted into the bash command; spaces would break it.
            await Bash.ExecuteAsync($"chmod -R 777 {directory}");
            return directory;
        }

        /// <summary>Runs each requested feature against the freshly created project.</summary>
        private async Task InitializeFeatures(ProjectContext projectCtx, List<string> features)
        {
            foreach (var feature in features)
                await _features.First(p => p.Name == feature).ExecuteAsync(projectCtx);
        }
    }
}

View File

@ -0,0 +1,29 @@
using System.CommandLine;
using MicroForge.CLI.Commands.Interfaces;
namespace MicroForge.CLI.Commands;
public partial class MicroForge
{
    /// <summary>
    /// "m4g install" (alias "i") — pip-installs the given packages inside the
    /// project's venv and refreshes requirements.txt with the resulting freeze.
    /// </summary>
    public class Install : Command, ISubCommandOf<MicroForge>
    {
        private static readonly Argument<IEnumerable<string>> PackagesArgument =
            new(name: "packages", description: "The names of the packages to install");

        public Install() : base("install", "Install packages and update the requirements.txt")
        {
            AddAlias("i");
            AddArgument(PackagesArgument);
            this.SetHandler(ExecuteAsync, PackagesArgument);
        }

        private static async Task ExecuteAsync(IEnumerable<string> packages)
        {
            var packageList = string.Join(' ', packages);
            await Bash.ExecuteAsync(
                "source .venv/bin/activate",
                $"pip install {packageList}",
                "pip freeze > requirements.txt"
            );
        }
    }
}

View File

@ -0,0 +1,27 @@
using System.CommandLine;
using MicroForge.CLI.Commands.Interfaces;
namespace MicroForge.CLI.Commands;
public partial class MicroForge
{
    public partial class Migrations
    {
        /// <summary>
        /// "m4g migrations apply" (alias "a") — upgrades the database to the latest
        /// Alembic revision ("head") inside the project's venv.
        /// </summary>
        public class Apply : Command, ISubCommandOf<Migrations>
        {
            public Apply() : base("apply", "Apply migrations to the database")
            {
                AddAlias("a");
                this.SetHandler(ExecuteAsync);
            }

            private static async Task ExecuteAsync()
            {
                await Bash.ExecuteAsync(
                    "source .venv/bin/activate",
                    "alembic upgrade head"
                );
            }
        }
    }
}

View File

@ -0,0 +1,27 @@
using System.CommandLine;
using MicroForge.CLI.Commands.Interfaces;
namespace MicroForge.CLI.Commands;
public partial class MicroForge
{
    public partial class Migrations
    {
        /// <summary>
        /// "m4g migrations rollback" (alias "r") — downgrades the database by one
        /// Alembic revision inside the project's venv.
        /// </summary>
        public class Rollback : Command, ISubCommandOf<Migrations>
        {
            public Rollback() : base("rollback", "Rollback the last migration")
            {
                AddAlias("r");
                this.SetHandler(ExecuteAsync);
            }

            private static async Task ExecuteAsync()
            {
                await Bash.ExecuteAsync(
                    "source .venv/bin/activate",
                    "alembic downgrade -1"
                );
            }
        }
    }
}

View File

@ -0,0 +1,18 @@
using System.CommandLine;
using MicroForge.CLI.Commands.Interfaces;
namespace MicroForge.CLI.Commands;
public partial class MicroForge
{
    /// <summary>
    /// "m4g migrations" (alias "m") — parent command grouping migration management
    /// (apply, rollback) supplied through DI.
    /// </summary>
    public partial class Migrations : Command, ISubCommandOf<MicroForge>
    {
        public Migrations(IEnumerable<ISubCommandOf<Migrations>> subCommands) :
            base("migrations", "Manage your migrations")
        {
            AddAlias("m");
            // Attach every registered child command, in registration order.
            foreach (var child in subCommands)
                AddCommand((child as Command)!);
        }
    }
}

View File

@ -0,0 +1,24 @@
using System.CommandLine;
using MicroForge.CLI.Commands.Interfaces;
namespace MicroForge.CLI.Commands;
public partial class MicroForge
{
    /// <summary>
    /// "m4g rewrite" — scratch command for trying out python source rewriters.
    /// Currently a no-op placeholder.
    /// </summary>
    public class Rewrite : Command, ISubCommandOf<MicroForge>
    {
        public Rewrite() : base("rewrite", "Test a python source rewriter.")
        {
            this.SetHandler(ExecuteAsync);
        }

        // Returns a completed task instead of declaring 'async' with no awaits (CS1998).
        private Task ExecuteAsync()
        {
            // Placeholder: previously read main.py, ran a TestRewriter over it and
            // wrote the result back; re-introduce when a rewriter is under test.
            return Task.CompletedTask;
        }
    }
}

View File

@ -0,0 +1,29 @@
using System.CommandLine;
using MicroForge.CLI.Commands.Interfaces;
namespace MicroForge.CLI.Commands;
public partial class MicroForge
{
    /// <summary>
    /// "m4g uninstall" (alias "u") — pip-uninstalls the given packages from the
    /// project's venv and refreshes requirements.txt.
    /// </summary>
    public class Uninstall : Command, ISubCommandOf<MicroForge>
    {
        private static readonly Argument<IEnumerable<string>> PackagesArgument =
            new(name: "packages", description: "The names of the packages to uninstall");

        public Uninstall() : base("uninstall", "Uninstall packages and update the requirements.txt")
        {
            AddAlias("u");
            AddArgument(PackagesArgument);
            this.SetHandler(ExecuteAsync, PackagesArgument);
        }

        private async Task ExecuteAsync(IEnumerable<string> packages)
        {
            // "-y" is required: pip uninstall prompts for confirmation, and Bash
            // closes stdin after feeding the script, so the prompt would abort
            // instead of uninstalling.
            await Bash.ExecuteAsync(
                "source .venv/bin/activate",
                $"pip uninstall -y {string.Join(' ', packages)}",
                "pip freeze > requirements.txt"
            );
        }
    }
}

View File

@ -0,0 +1,17 @@
using System.CommandLine;
using MicroForge.CLI.Commands.Interfaces;
namespace MicroForge.CLI.Commands;
/// <summary>
/// The root "m4g" command; every top-level sub-command is supplied through DI.
/// </summary>
public partial class MicroForge : RootCommand
{
// The CLI presents itself as "m4g" (matches <ToolCommandName> in the csproj).
public override string Name => "m4g";
public MicroForge(IEnumerable<ISubCommandOf<MicroForge>> commands) : base("The MicroForge CLI tool.")
{
// Attach every registered top-level command, in registration order.
commands
.Cast<Command>()
.ToList()
.ForEach(AddCommand);
}
}

View File

@ -0,0 +1,13 @@
namespace MicroForge.CLI.Exceptions;
/// <summary>
/// Thrown when a bash script executed through <c>Bash</c> fails.
/// </summary>
public class BashException : Exception
{
    // The base Exception already stores and exposes the constructor message, so the
    // original's redundant _message field and Message override were removed.
    public BashException(string message) : base(message)
    {
    }
}

View File

@ -0,0 +1,20 @@
using System.Text.Json;
namespace MicroForge.CLI.Extensions;
/// <summary>
/// Extension helpers for serializing objects to JSON strings.
/// </summary>
public static class ObjectStreamExtensions
{
    /// <summary>
    /// Serializes <paramref name="object"/> to a JSON string using the async
    /// serializer. Falls back to <c>Shared.DefaultJsonSerializerOptions.Default</c>
    /// when no options are supplied.
    /// </summary>
    public static async Task<string> SerializeAsync(
        this object @object,
        JsonSerializerOptions? jsonSerializerOptions = null
    )
    {
        var options = jsonSerializerOptions ?? Shared.DefaultJsonSerializerOptions.Default;

        using var buffer = new MemoryStream();
        await JsonSerializer.SerializeAsync(buffer, @object, options);

        // Rewind and read the UTF-8 payload back as a string.
        buffer.Position = 0;
        using var reader = new StreamReader(buffer);
        return await reader.ReadToEndAsync();
    }
}

View File

@ -0,0 +1,46 @@
using MicroForge.CLI.Commands.Interfaces;
using MicroForge.CLI.Features;
using Microsoft.Extensions.DependencyInjection;
namespace MicroForge.CLI.Extensions;
/// <summary>
/// DI registration helpers for the CLI's services and command tree.
/// </summary>
public static class ServiceCollectionExtensions
{
/// <summary>
/// Registers the per-invocation contexts and the available project features.
/// </summary>
/// <param name="args">The raw process arguments, captured into <see cref="ArgsContext"/>.</param>
public static IServiceCollection AddServices(this IServiceCollection services, string[] args)
{
services.AddScoped<ArgsContext>(_ => new ArgsContext { Args = args });
services.AddScoped<ProjectContext>();
services.AddScoped<IFeature, Api>();
services.AddScoped<IFeature, Orm>();
return services;
}
/// <summary>
/// Registers the full command tree. Each parent command receives its children via
/// IEnumerable of ISubCommandOf of the parent; registration order here determines the
/// order sub-commands are attached in, so keep it stable.
/// </summary>
public static IServiceCollection AddCommands(this IServiceCollection services)
{
// Register "m4g"
services.AddScoped<Commands.MicroForge>();
services.AddScoped<ISubCommandOf<Commands.MicroForge>, Commands.MicroForge.Init>();
services.AddScoped<ISubCommandOf<Commands.MicroForge>, Commands.MicroForge.Run>();
services.AddScoped<ISubCommandOf<Commands.MicroForge>, Commands.MicroForge.Install>();
services.AddScoped<ISubCommandOf<Commands.MicroForge>, Commands.MicroForge.Uninstall>();
// Register "m4g add"
services.AddScoped<ISubCommandOf<Commands.MicroForge>, Commands.MicroForge.Add>();
services.AddScoped<ISubCommandOf<Commands.MicroForge.Add>, Commands.MicroForge.Add.Api>();
services.AddScoped<ISubCommandOf<Commands.MicroForge.Add>, Commands.MicroForge.Add.Orm>();
// Register "m4g generate"
services.AddScoped<ISubCommandOf<Commands.MicroForge>, Commands.MicroForge.Generate>();
services.AddScoped<ISubCommandOf<Commands.MicroForge.Generate>, Commands.MicroForge.Generate.Entity>();
services.AddScoped<ISubCommandOf<Commands.MicroForge.Generate>, Commands.MicroForge.Generate.Router>();
services.AddScoped<ISubCommandOf<Commands.MicroForge.Generate>, Commands.MicroForge.Generate.Migration>();
// Register "m4g migrations"
services.AddScoped<ISubCommandOf<Commands.MicroForge>, Commands.MicroForge.Migrations>();
services.AddScoped<ISubCommandOf<Commands.MicroForge.Migrations>, Commands.MicroForge.Migrations.Apply>();
services.AddScoped<ISubCommandOf<Commands.MicroForge.Migrations>, Commands.MicroForge.Migrations.Rollback>();
return services;
}
}

View File

@ -0,0 +1,64 @@
namespace MicroForge.CLI.Features;
/// <summary>
/// The "api" feature: installs FastAPI + uvicorn into the project venv, writes a
/// sample "hello" router, and prepends the FastAPI bootstrap to main.py.
/// </summary>
public sealed class Api : IFeature
{
    #region Main
    private static readonly string[] HelloRouter =
    [
        "from fastapi import APIRouter",
        "from fastapi.responses import JSONResponse",
        "from fastapi.encoders import jsonable_encoder",
        "",
        "router = APIRouter()",
        "",
        "@router.get(\"/{name}\")",
        "async def greet(name: str):",
        "\treturn JSONResponse(status_code=200, content=jsonable_encoder({'greeting': f\"Hello, {name}!\"}))"
    ];

    private static readonly string[] Main =
    [
        "from fastapi import FastAPI",
        "app = FastAPI()",
        "",
        "from api.routers import hello",
        "app.include_router(prefix=\"/hello\", router=hello.router)"
    ];
    #endregion

    public const string FeatureName = "api";

    public string Name => FeatureName;

    public async Task ExecuteAsync(ProjectContext context)
    {
        // Adding the same feature twice is a no-op.
        if (context.Config.Features.Contains(FeatureName))
        {
            Console.WriteLine($"Feature {FeatureName} has already been initialized.");
            return;
        }

        Console.WriteLine(string.Join("\n", [
            $"Adding feature {FeatureName}",
            "Requirements:",
            " - fastapi",
            " - uvicorn[standard]",
        ]));

        // 'uvicorn[standard]' is quoted: unquoted square brackets are glob patterns
        // in bash and can be expanded against matching files in the working directory.
        await Bash.ExecuteAsync(
            "source .venv/bin/activate",
            "python3 -m pip install fastapi 'uvicorn[standard]'",
            "python3 -m pip freeze > requirements.txt"
        );

        await context.CreateFile("api/routers/hello.py", HelloRouter);

        // Prepend the app bootstrap; the entrypoint file is empty at this point,
        // so no separating newline is needed.
        var main = await context.ReadFile("main.py");
        main = string.Join('\n', Main) + main;
        await context.WriteFile("main.py", main);

        context.Config.Features.Add(FeatureName);
    }
}

View File

@ -0,0 +1,8 @@
namespace MicroForge.CLI.Features;
/// <summary>
/// A scaffolding feature (e.g. "api", "orm") that can be added to a project at
/// init time (--features) or later via "m4g add".
/// </summary>
public interface IFeature
{
// The unique name used to select the feature from the command line.
public string Name { get; }
// Applies the feature to the project described by the given context.
public Task ExecuteAsync(ProjectContext context);
}

View File

@ -0,0 +1,95 @@
using MicroForge.CLI.CodeGen;
namespace MicroForge.CLI.Features;
/// <summary>
/// The "orm" feature: installs SQLAlchemy + Alembic into the project venv, runs
/// "alembic init", rewires env.py to the generated ORM code, and writes default
/// settings, session, base-class and sample entity modules.
/// </summary>
public sealed class Orm : IFeature
{
    #region Defaults
    private static readonly string[] Settings =
    [
        "connectionstring = \"mysql+asyncmy://root:root@localhost:3306/example\"",
        "",
        "class OrmSettings:",
        "\tdef get_connectionstring() -> str:",
        "\t\treturn connectionstring"
    ];

    private static readonly string[] AsyncSession =
    [
        "from sqlalchemy.ext.asyncio import create_async_engine, AsyncEngine, AsyncSession",
        "from orm.settings import OrmSettings",
        "",
        "async_engine: AsyncEngine = create_async_engine(OrmSettings.get_connectionstring())",
        "",
        "def async_session():",
        "\treturn AsyncSession(async_engine, expire_on_commit=False)"
    ];

    private static readonly string[] EntityBase =
    [
        "from sqlalchemy.orm import DeclarativeBase",
        "",
        "class EntityBase(DeclarativeBase):",
        "\tpass"
    ];

    // Sample entity. The __repr__ lines are tab-indented so the generated Python
    // defines __repr__ as a method of User; the original template emitted it at
    // module level (missing tabs), unlike the Generate.Entity template.
    private static readonly string[] User =
    [
        "from sqlalchemy import INTEGER, Column, String",
        "from orm.entities.entity_base import EntityBase",
        "",
        "class User(EntityBase):",
        "\t__tablename__ = \"users\"",
        "\tid = Column(INTEGER, primary_key=True)",
        "\tfirstname = Column(String(255))",
        "\tlastname = Column(String(255))",
        "",
        "\tdef __repr__(self) -> str:",
        "\t\treturn f\"User(id={self.id!r}, firstname={self.firstname!r}, lastname={self.lastname!r})\""
    ];
    #endregion

    public const string FeatureName = "orm";

    public string Name => FeatureName;

    public async Task ExecuteAsync(ProjectContext context)
    {
        // Adding the same feature twice is a no-op.
        if (context.Config.Features.Contains(FeatureName))
        {
            Console.WriteLine($"Feature {FeatureName} has already been initialized.");
            return;
        }

        Console.WriteLine(string.Join("\n", [
            $"Adding feature {FeatureName}",
            "Requirements:",
            " - asyncmy",
            " - sqlalchemy",
            " - alembic",
        ]));

        await Bash.ExecuteAsync(
            "source .venv/bin/activate",
            "python3 -m pip install asyncmy sqlalchemy alembic",
            "python3 -m pip freeze > requirements.txt",
            "alembic init -t async orm"
        );

        // Rewire Alembic's env.py to the generated settings/entities, and register
        // the sample User entity.
        var env = await context.ReadFile("orm/env.py");
        env = new OrmEnvInitializer(env).Rewrite();
        env = new OrmEnvUpdater(env, "user", "User").Rewrite();
        await context.WriteFile("orm/env.py", env);

        await context.CreateFile("orm/settings.py", Settings);
        await context.CreateFile("orm/engine/async_session.py", AsyncSession);
        await context.CreateFile("orm/entities/entity_base.py", EntityBase);
        await context.CreateFile("orm/entities/user.py", User);

        context.Config.Features.Add(FeatureName);
    }
}

View File

@ -0,0 +1,38 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net8.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <PackAsTool>true</PackAsTool>
    <ToolCommandName>m4g</ToolCommandName>
    <PackageOutputPath>./nupkg</PackageOutputPath>
  </PropertyGroup>

  <ItemGroup>
    <!-- This ProjectReference already makes MicroForge.Parsing available. The
         removed direct <Reference> to bin\Debug\net8.0\MicroForge.Parsing.dll was
         redundant and broke any non-Debug build. -->
    <ProjectReference Include="..\MicroForge.Parsing\MicroForge.Parsing.csproj" />
  </ItemGroup>

  <ItemGroup>
    <PackageReference Include="Antlr4.Runtime" Version="4.6.6" />
    <PackageReference Include="Humanizer" Version="2.14.1" />
    <PackageReference Include="Microsoft.Extensions.Hosting" Version="8.0.0" />
    <PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="8.0.0" />
    <PackageReference Include="System.CommandLine" Version="2.0.0-beta4.22272.1" />
    <PackageReference Include="System.IO.FileSystem.AccessControl" Version="5.0.0" />
  </ItemGroup>

  <ItemGroup>
    <None Update="scripts\env.py">
      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
    </None>
  </ItemGroup>

</Project>

33
MicroForge.CLI/Program.cs Normal file
View File

@ -0,0 +1,33 @@
using System.CommandLine;
using MicroForge.CLI;
using MicroForge.CLI.CodeGen;
using MicroForge.CLI.Exceptions;
using MicroForge.CLI.Extensions;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using RootCommand = MicroForge.CLI.Commands.MicroForge;
// Build the DI host that wires up the CLI services and the m4g command tree.
using var host = Host
    .CreateDefaultBuilder()
    .ConfigureServices((_, services) =>
    {
        services
            .AddServices(args)
            .AddCommands();
    })
    .Build();

try
{
    // Load m4g.json (LoadConfig itself skips init/help/version invocations),
    // run the requested command, then persist any config changes it made.
    var ctx = host.Services.GetRequiredService<ProjectContext>();
    await ctx.LoadConfig();
    await host.Services.GetRequiredService<RootCommand>().InvokeAsync(args);
    await ctx.SaveConfig();
}
catch (Exception e)
{
    // Report the failure instead of swallowing it silently, and exit non-zero
    // so shell scripts can detect that the command failed.
    Console.Error.WriteLine(e.Message);
    Environment.ExitCode = 1;
}

View File

@ -0,0 +1,7 @@
namespace MicroForge.CLI;
/// <summary>
/// Settings persisted in a project's m4g.json file.
/// </summary>
public class ProjectConfig
{
    /// <summary>Entry point identifier for the project (presumably a file path — confirm with callers). Empty by default.</summary>
    public string Entrypoint { get; set; } = string.Empty;

    /// <summary>Names of the features that have been added to the project (e.g. "orm").</summary>
    public List<string> Features { get; set; } = new List<string>();
}

View File

@ -0,0 +1,83 @@
using System.Text.Json;
using MicroForge.CLI.Extensions;
namespace MicroForge.CLI;
/// <summary>
/// Tracks the project root directory and the m4g.json configuration, and
/// provides file helpers that resolve paths relative to that root.
/// </summary>
public class ProjectContext
{
    /// <summary>Root directory of the current project; defaults to the process working directory.</summary>
    public string RootDirectory { get; private set; } = Environment.CurrentDirectory;

    /// <summary>Absolute path of the project's m4g.json configuration file.</summary>
    public string ConfigPath => Path.Combine(RootDirectory, "m4g.json");

    /// <summary>Configuration populated by <see cref="LoadConfig"/>; unset until then.</summary>
    public ProjectConfig Config { get; private set; } = default!;

    private readonly ArgsContext _argsContext;

    public ProjectContext(ArgsContext argsContext)
    {
        _argsContext = argsContext;
    }

    /// <summary>
    /// Deserializes m4g.json into <see cref="Config"/>. Unless <paramref name="force"/>
    /// is set, loading is skipped for commands that run outside an existing project
    /// (init, help, version).
    /// </summary>
    /// <exception cref="FileNotFoundException">m4g.json does not exist.</exception>
    public async Task LoadConfig(bool force = false)
    {
        // NOTE(review): "--help" is matched exactly while "-?"/"-h" allow trailing
        // arguments — confirm whether ["--help", ..] was intended here.
        if (_argsContext.Args
            is ["init", ..]
            or ["-?", ..]
            or ["-h", ..]
            or ["--help"]
            or ["--version"] && !force)
            return;

        if (!File.Exists(ConfigPath))
            throw new FileNotFoundException($"File {ConfigPath} does not exist.");

        // Dispose the stream once deserialization completes (it previously leaked).
        await using var stream = File.OpenRead(ConfigPath);
        Config = (await JsonSerializer.DeserializeAsync<ProjectConfig>(
            stream,
            Shared.DefaultJsonSerializerOptions.CamelCasePrettyPrint
        ))!;
    }

    /// <summary>Makes <paramref name="path"/> both the process working directory and the project root.</summary>
    public void ChangeDirectory(string path)
    {
        Directory.SetCurrentDirectory(path);
        RootDirectory = path;
    }

    /// <summary>
    /// Creates a file under the project root with the given lines joined by '\n',
    /// creating parent directories as needed. No-op if the file already exists.
    /// </summary>
    public async Task CreateFile(string path, params string[] content)
    {
        var fullPath = Path.Combine(RootDirectory, path);
        var fileInfo = new FileInfo(fullPath);
        if (fileInfo.Exists) return;
        Directory.CreateDirectory(fileInfo.Directory!.FullName);
        await File.WriteAllTextAsync(fullPath, string.Join("\n", content));
        // NOTE(review): world-writable permissions (777) are a security smell;
        // confirm whether a tighter mode (e.g. 644/755) would suffice.
        await Bash.ExecuteAsync($"chmod 777 {fullPath}");
    }

    /// <summary>Writes (or overwrites) a file under the project root, joining the lines with '\n'.</summary>
    public async Task WriteFile(string path, params string[] content)
    {
        var fullPath = Path.Combine(RootDirectory, path);
        var fileInfo = new FileInfo(fullPath);
        Directory.CreateDirectory(fileInfo.Directory!.FullName);
        await File.WriteAllTextAsync(fullPath, string.Join("\n", content));
    }

    /// <summary>Reads the full text of a file under the project root.</summary>
    /// <exception cref="FileNotFoundException">The file does not exist.</exception>
    public async Task<string> ReadFile(string path)
    {
        var fullPath = Path.Combine(RootDirectory, path);
        // More specific than the bare Exception thrown before; still caught by
        // existing catch (Exception) handlers, and consistent with LoadConfig.
        if (!File.Exists(fullPath))
            throw new FileNotFoundException($"File {fullPath} does not exist.");
        return await File.ReadAllTextAsync(fullPath);
    }

    /// <summary>Persists <see cref="Config"/> back to m4g.json, if it was loaded.</summary>
    public async Task SaveConfig()
    {
        if (Config is not null)
        {
            var json = await Config.SerializeAsync(Shared.DefaultJsonSerializerOptions.CamelCasePrettyPrint);
            await File.WriteAllTextAsync(ConfigPath, json);
        }
    }
}

20
MicroForge.CLI/Shared.cs Normal file
View File

@ -0,0 +1,20 @@
using System.Text.Json;
namespace MicroForge.CLI;
/// <summary>
/// Process-wide shared constants for the CLI.
/// </summary>
public static class Shared
{
    /// <summary>
    /// Cached <see cref="JsonSerializerOptions"/> presets; options instances are
    /// reused rather than allocated per call.
    /// </summary>
    public static class DefaultJsonSerializerOptions
    {
        /// <summary>camelCase property names, compact (non-indented) output.</summary>
        public static readonly JsonSerializerOptions Default = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        };

        /// <summary>camelCase property names with indented output (used for m4g.json).</summary>
        public static readonly JsonSerializerOptions CamelCasePrettyPrint = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            WriteIndented = true,
        };
    }
}

View File

@ -0,0 +1,90 @@
import asyncio
from logging.config import fileConfig

from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# NOTE(review): m4g appears to rewrite this file programmatically (OrmEnvInitializer)
# after `alembic init`, so keep the structure close to the stock async template.
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    # Runs the actual migrations on an already-established (sync-wrapped) connection.
    context.configure(connection=connection, target_metadata=target_metadata)

    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    # Build the async engine from the [alembic] ini section's sqlalchemy.* keys.
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        # Bridge into the sync migration runner from the async connection.
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@ -0,0 +1,5 @@
#!/usr/bin/bash
# Pack the CLI as a NuGet package and (re)install it as the global `m4g` tool.
# -v d = detailed verbosity for both commands.

# Fail fast: without this, a failed `dotnet pack` would still (re)install a
# stale package from ./nupkg. Also catch unset variables and pipeline errors.
set -euo pipefail

dotnet pack -v d
dotnet tool update --global --add-source ./nupkg MicroForge.CLI -v d

View File

@ -0,0 +1,26 @@
<Project Sdk="Microsoft.NET.Sdk">
<!-- Class library holding the ANTLR4-generated Python lexer/parser plus
     PythonLexerBase. The Antlr4/Antlr4.CodeGenerator packages run at build
     time only (PrivateAssets=all); Antlr4.Runtime is the runtime dependency. -->
<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Antlr4" Version="4.6.6">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Antlr4.CodeGenerator" Version="4.6.6">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Antlr4.Runtime" Version="4.6.6" />
</ItemGroup>
<ItemGroup>
<!-- Grammar sources fed to the ANTLR code generator. -->
<Content Include="PythonLexer.g4" />
<Content Include="PythonParser.g4" />
</ItemGroup>
</Project>

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,493 @@
/*
The MIT License (MIT)
Copyright (c) 2021 Robert Einhorn
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
* Project : Python Indent/Dedent handler for ANTLR4 grammars
*
* Developed by : Robert Einhorn
*/
using Antlr4.Runtime;
using System.Text.RegularExpressions;
namespace MicroForge.Parsing;
/// <summary>
/// Base lexer that post-processes the raw ANTLR token stream to synthesize
/// Python's INDENT/DEDENT tokens and to drive the f-string lexer modes.
/// Tokens are buffered in <c>pendingTokens</c> and drained one at a time by
/// <see cref="NextToken"/>. Adapted from Robert Einhorn's Python
/// indent/dedent handler for ANTLR4 grammars (MIT licensed, see file header).
/// </summary>
public abstract class PythonLexerBase : Lexer
{
    // A stack that keeps track of the indentation lengths
    private Stack<int> indentLengthStack;
    // A list where tokens are waiting to be loaded into the token stream
    private LinkedList<IToken> pendingTokens;

    // last pending token types
    private int previousPendingTokenType;
    private int lastPendingTokenTypeFromDefaultChannel;

    // The amount of opened parentheses, square brackets, or curly braces
    private int opened;
    // The amount of opened parentheses and square brackets in the current lexer mode
    private Stack<int> paren_or_bracket_openedStack;

    private bool wasSpaceIndentation;
    private bool wasTabIndentation;
    private bool wasIndentationMixedWithSpacesAndTabs;
    private const int INVALID_LENGTH = -1;

    private CommonToken curToken; // current (under processing) token
    private IToken ffgToken; // following (look ahead) token

    private const string ERR_TXT = " ERROR: ";

    protected PythonLexerBase(ICharStream input) : base(input)
    {
        this.Init();
    }

    // Resets all mutable lexer state; called from the constructor and Reset().
    private void Init()
    {
        this.indentLengthStack = new Stack<int>();
        this.pendingTokens = new LinkedList<IToken>();
        this.previousPendingTokenType = 0;
        this.lastPendingTokenTypeFromDefaultChannel = 0;
        this.opened = 0;
        this.paren_or_bracket_openedStack = new Stack<int>();
        this.wasSpaceIndentation = false;
        this.wasTabIndentation = false;
        this.wasIndentationMixedWithSpacesAndTabs = false;
        this.curToken = null!;
        this.ffgToken = null!;
    }

    public override IToken NextToken() // reading the input stream until a return EOF
    {
        this.CheckNextToken();
        IToken firstPendingToken = this.pendingTokens.First.Value;
        this.pendingTokens.RemoveFirst();
        return firstPendingToken; // add the queued token to the token stream
    }

    // Pulls the next underlying token (plus one of lookahead) and dispatches on
    // its type, queueing zero or more tokens into pendingTokens.
    private void CheckNextToken()
    {
        if (this.previousPendingTokenType != TokenConstants.Eof)
        {
            this.SetCurrentAndFollowingTokens();
            if (this.indentLengthStack.Count == 0) // We're at the first token
            {
                this.HandleStartOfInput();
            }

            switch (this.curToken.Type)
            {
                case PythonLexer.LPAR:
                case PythonLexer.LSQB:
                case PythonLexer.LBRACE:
                    this.opened++;
                    this.AddPendingToken(this.curToken);
                    break;
                case PythonLexer.RPAR:
                case PythonLexer.RSQB:
                case PythonLexer.RBRACE:
                    this.opened--;
                    this.AddPendingToken(this.curToken);
                    break;
                case PythonLexer.NEWLINE:
                    this.HandleNEWLINEtoken();
                    break;
                case PythonLexer.STRING:
                    this.HandleSTRINGtoken();
                    break;
                case PythonLexer.FSTRING_MIDDLE:
                    this.HandleFSTRING_MIDDLE_token();
                    break;
                case PythonLexer.ERROR_TOKEN:
                    this.ReportLexerError("token recognition error at: '" + this.curToken.Text + "'");
                    this.AddPendingToken(this.curToken);
                    break;
                case TokenConstants.Eof:
                    this.HandleEOFtoken();
                    break;
                default:
                    this.AddPendingToken(this.curToken);
                    break;
            }
            this.HandleFORMAT_SPECIFICATION_MODE();
        }
    }

    // Advances the (curToken, ffgToken) pair; ffgToken is a one-token lookahead.
    private void SetCurrentAndFollowingTokens()
    {
        this.curToken = this.ffgToken == null ?
                        new CommonToken(base.NextToken()) :
                        new CommonToken(this.ffgToken);

        this.HandleFStringLexerModes();

        this.ffgToken = this.curToken.Type == TokenConstants.Eof ?
                        this.curToken :
                        base.NextToken();
    }

    // initialize the _indentLengths
    // hide the leading NEWLINE token(s)
    // if exists, find the first statement (not NEWLINE, not EOF token) that comes from the default channel
    // insert a leading INDENT token if necessary
    private void HandleStartOfInput()
    {
        // initialize the stack with a default 0 indentation length
        this.indentLengthStack.Push(0); // this will never be popped off
        while (this.curToken.Type != TokenConstants.Eof)
        {
            if (this.curToken.Channel == TokenConstants.DefaultChannel)
            {
                if (this.curToken.Type == PythonLexer.NEWLINE)
                {
                    // all the NEWLINE tokens must be ignored before the first statement
                    this.HideAndAddPendingToken(this.curToken);
                }
                else
                { // We're at the first statement
                    this.InsertLeadingIndentToken();
                    return; // continue the processing of the current token with CheckNextToken()
                }
            }
            else
            {
                this.AddPendingToken(this.curToken); // it can be WS, EXPLICIT_LINE_JOINING, or COMMENT token
            }
            this.SetCurrentAndFollowingTokens();
        } // continue the processing of the EOF token with CheckNextToken()
    }

    private void InsertLeadingIndentToken()
    {
        if (this.previousPendingTokenType == PythonLexer.WS)
        {
            var prevToken = this.pendingTokens.Last.Value;
            if (this.GetIndentationLength(prevToken.Text) != 0) // there is an "indentation" before the first statement
            {
                const string errMsg = "first statement indented";
                this.ReportLexerError(errMsg);
                // insert an INDENT token before the first statement to raise an 'unexpected indent' error later by the parser
                this.CreateAndAddPendingToken(PythonLexer.INDENT, TokenConstants.DefaultChannel, PythonLexerBase.ERR_TXT + errMsg, this.curToken);
            }
        }
    }

    private void HandleNEWLINEtoken()
    {
        if (this.opened > 0)
        {
            // We're in an implicit line joining, ignore the current NEWLINE token
            this.HideAndAddPendingToken(this.curToken);
        }
        else
        {
            CommonToken nlToken = new CommonToken(this.curToken); // save the current NEWLINE token
            bool isLookingAhead = this.ffgToken.Type == PythonLexer.WS;
            if (isLookingAhead)
            {
                this.SetCurrentAndFollowingTokens(); // set the next two tokens
            }

            switch (this.ffgToken.Type)
            {
                case PythonLexer.NEWLINE: // We're before a blank line
                case PythonLexer.COMMENT: // We're before a comment
                case PythonLexer.TYPE_COMMENT: // We're before a type comment
                    this.HideAndAddPendingToken(nlToken);
                    if (isLookingAhead)
                    {
                        this.AddPendingToken(this.curToken); // WS token
                    }
                    break;
                default:
                    this.AddPendingToken(nlToken);
                    if (isLookingAhead)
                    { // We're on whitespace(s) followed by a statement
                        int indentationLength = this.ffgToken.Type == TokenConstants.Eof ?
                                                0 :
                                                this.GetIndentationLength(this.curToken.Text);

                        if (indentationLength != PythonLexerBase.INVALID_LENGTH)
                        {
                            this.AddPendingToken(this.curToken); // WS token
                            this.InsertIndentOrDedentToken(indentationLength); // may insert INDENT token or DEDENT token(s)
                        }
                        else
                        {
                            this.ReportError("inconsistent use of tabs and spaces in indentation");
                        }
                    }
                    else
                    {
                        // We're at a newline followed by a statement (there is no whitespace before the statement)
                        this.InsertIndentOrDedentToken(0); // may insert DEDENT token(s)
                    }
                    break;
            }
        }
    }

    private void InsertIndentOrDedentToken(int indentLength)
    {
        //*** https://docs.python.org/3/reference/lexical_analysis.html#indentation
        int prevIndentLength = this.indentLengthStack.Peek();
        if (indentLength > prevIndentLength)
        {
            this.CreateAndAddPendingToken(PythonLexer.INDENT, TokenConstants.DefaultChannel, null, this.ffgToken);
            this.indentLengthStack.Push(indentLength);
        }
        else
        {
            while (indentLength < prevIndentLength)
            { // more than 1 DEDENT token may be inserted into the token stream
                this.indentLengthStack.Pop();
                prevIndentLength = this.indentLengthStack.Peek();
                if (indentLength <= prevIndentLength)
                {
                    this.CreateAndAddPendingToken(PythonLexer.DEDENT, TokenConstants.DefaultChannel, null, this.ffgToken);
                }
                else
                {
                    this.ReportError("inconsistent dedent");
                }
            }
        }
    }

    private void HandleSTRINGtoken()
    {
        // remove the \<newline> escape sequences from the string literal
        // https://docs.python.org/3.11/reference/lexical_analysis.html#string-and-bytes-literals
        string line_joinFreeStringLiteral = Regex.Replace(this.curToken.Text, @"\\\r?\n", "");
        if (this.curToken.Text.Length == line_joinFreeStringLiteral.Length)
        {
            this.AddPendingToken(this.curToken);
        }
        else
        {
            CommonToken originalSTRINGtoken = new CommonToken(this.curToken); // backup the original token
            this.curToken.Text = line_joinFreeStringLiteral;
            this.AddPendingToken(this.curToken); // add the modified token with inline string literal
            this.HideAndAddPendingToken(originalSTRINGtoken); // add the original token with a hidden channel
            // this inserted hidden token allows to restore the original string literal with the \<newline> escape sequences
        }
    }

    private void HandleFSTRING_MIDDLE_token() // replace the double braces '{{' or '}}' to single braces and hide the second braces
    {
        string fsMid = this.curToken.Text;
        fsMid = fsMid.Replace("{{", "{_").Replace("}}", "}_"); // replace: {{ --> {_ and }} --> }_
        Regex regex = new Regex(@"(?<=[{}])_");
        string[] arrOfStr = regex.Split(fsMid); // split by {_ or }_
        foreach (string s in arrOfStr)
        {
            if (!String.IsNullOrEmpty(s))
            {
                this.CreateAndAddPendingToken(PythonLexer.FSTRING_MIDDLE, TokenConstants.DefaultChannel, s, this.ffgToken);
                string lastCharacter = s.Substring(s.Length - 1);
                if ("{}".Contains(lastCharacter))
                {
                    this.CreateAndAddPendingToken(PythonLexer.FSTRING_MIDDLE, TokenConstants.HiddenChannel, lastCharacter, this.ffgToken);
                    // this inserted hidden token allows to restore the original f-string literal with the double braces
                }
            }
        }
    }

    private void HandleFStringLexerModes() // https://peps.python.org/pep-0498/#specification
    {
        if (this._modeStack.Count > 0)
        {
            switch (this.curToken.Type)
            {
                case PythonLexer.LBRACE:
                    this.PushMode(PythonLexer.DefaultMode);
                    this.paren_or_bracket_openedStack.Push(0);
                    break;
                case PythonLexer.LPAR:
                case PythonLexer.LSQB:
                    // https://peps.python.org/pep-0498/#lambdas-inside-expressions
                    this.paren_or_bracket_openedStack.Push(this.paren_or_bracket_openedStack.Pop() + 1); // increment the last element
                    break;
                case PythonLexer.RPAR:
                case PythonLexer.RSQB:
                    this.paren_or_bracket_openedStack.Push(this.paren_or_bracket_openedStack.Pop() - 1); // decrement the last element
                    break;
                case PythonLexer.COLON: // colon can only come from DEFAULT_MODE
                    if (this.paren_or_bracket_openedStack.Peek() == 0)
                    {
                        switch (this._modeStack.First()) // check the previous lexer mode (the current is DEFAULT_MODE)
                        {
                            case PythonLexer.SINGLE_QUOTE_FSTRING_MODE:
                            case PythonLexer.LONG_SINGLE_QUOTE_FSTRING_MODE:
                            case PythonLexer.SINGLE_QUOTE_FORMAT_SPECIFICATION_MODE:
                                this.Mode(PythonLexer.SINGLE_QUOTE_FORMAT_SPECIFICATION_MODE); // continue in format spec. mode
                                break;
                            case PythonLexer.DOUBLE_QUOTE_FSTRING_MODE:
                            case PythonLexer.LONG_DOUBLE_QUOTE_FSTRING_MODE:
                            case PythonLexer.DOUBLE_QUOTE_FORMAT_SPECIFICATION_MODE:
                                this.Mode(PythonLexer.DOUBLE_QUOTE_FORMAT_SPECIFICATION_MODE); // continue in format spec. mode
                                break;
                        }
                    }
                    break;
                case PythonLexer.RBRACE:
                    switch (_mode)
                    {
                        case PythonLexer.DefaultMode:
                        case PythonLexer.SINGLE_QUOTE_FORMAT_SPECIFICATION_MODE:
                        case PythonLexer.DOUBLE_QUOTE_FORMAT_SPECIFICATION_MODE:
                            this.PopMode();
                            this.paren_or_bracket_openedStack.Pop();
                            break;
                        default:
                            this.ReportLexerError("f-string: single '}' is not allowed");
                            break;
                    }
                    break;
            }
        }
    }

    private void HandleFORMAT_SPECIFICATION_MODE()
    {
        if (this._modeStack.Count > 0 && this.ffgToken.Type == PythonLexer.RBRACE)
        {
            switch (this.curToken.Type)
            {
                case PythonLexer.COLON:
                case PythonLexer.RBRACE:
                    // insert an empty FSTRING_MIDDLE token instead of the missing format specification
                    this.CreateAndAddPendingToken(PythonLexer.FSTRING_MIDDLE, TokenConstants.DefaultChannel, "", this.ffgToken);
                    break;
            }
        }
    }

    private void InsertTrailingTokens()
    {
        switch (this.lastPendingTokenTypeFromDefaultChannel)
        {
            case PythonLexer.NEWLINE:
            case PythonLexer.DEDENT:
                break; // no trailing NEWLINE token is needed
            default:
                // insert an extra trailing NEWLINE token that serves as the end of the last statement
                this.CreateAndAddPendingToken(PythonLexer.NEWLINE, TokenConstants.DefaultChannel, null, this.ffgToken); // ffgToken is EOF
                break;
        }
        this.InsertIndentOrDedentToken(0); // Now insert as many trailing DEDENT tokens as needed
    }

    private void HandleEOFtoken()
    {
        if (this.lastPendingTokenTypeFromDefaultChannel > 0)
        { // there was a statement in the input (leading NEWLINE tokens are hidden)
            this.InsertTrailingTokens();
        }
        this.AddPendingToken(this.curToken);
    }

    private void HideAndAddPendingToken(CommonToken cToken)
    {
        cToken.Channel = TokenConstants.HiddenChannel;
        this.AddPendingToken(cToken);
    }

    // Clones baseToken's position info into a new zero-width token of the given
    // type/channel and queues it.
    private void CreateAndAddPendingToken(int type, int channel, string text, IToken baseToken)
    {
        CommonToken cToken = new CommonToken(baseToken);
        cToken.Type = type;
        cToken.Channel = channel;
        cToken.StopIndex = baseToken.StartIndex - 1;
        // cToken.Text = text == null
        //     ? "<" + Vocabulary.GetSymbolicName(type) + ">"
        //     : text;
        cToken.Text = text ?? string.Empty;

        this.AddPendingToken(cToken);
    }

    private void AddPendingToken(IToken token)
    {
        // save the last pending token type because the pendingTokens linked list can be empty by the nextToken()
        this.previousPendingTokenType = token.Type;
        if (token.Channel == TokenConstants.DefaultChannel)
        {
            this.lastPendingTokenTypeFromDefaultChannel = this.previousPendingTokenType;
        }
        this.pendingTokens.AddLast(token);
    }

    private int GetIndentationLength(string textWS) // the textWS may contain spaces, tabs or form feeds
    {
        const int TAB_LENGTH = 8; // the standard number of spaces to replace a tab with spaces
        int length = 0;
        foreach (char ch in textWS)
        {
            switch (ch)
            {
                case ' ':
                    this.wasSpaceIndentation = true;
                    length += 1;
                    break;
                case '\t':
                    this.wasTabIndentation = true;
                    length += TAB_LENGTH - (length % TAB_LENGTH);
                    break;
                case '\f': // form feed
                    length = 0;
                    break;
            }
        }

        if (this.wasTabIndentation && this.wasSpaceIndentation)
        {
            if (!this.wasIndentationMixedWithSpacesAndTabs)
            {
                this.wasIndentationMixedWithSpacesAndTabs = true;
                return PythonLexerBase.INVALID_LENGTH; // only for the first inconsistent indent
            }
        }
        return length;
    }

    private void ReportLexerError(string errMsg)
    {
        // NOTE(review): signature differs from the upstream original (kept commented
        // below) — presumably adjusted for this Antlr4.Runtime version's
        // SyntaxError overload; confirm against the referenced 4.6.6 API.
        // this.ErrorListenerDispatch.SyntaxError(this.ErrorOutput, this, this.curToken.Type, this.curToken.Line, this.curToken.Column, " LEXER" + PythonLexerBase.ERR_TXT + errMsg, null);
        this.ErrorListenerDispatch.SyntaxError( this, this.curToken.Type, this.curToken.Line, this.curToken.Column, " LEXER" + PythonLexerBase.ERR_TXT + errMsg, null);
    }

    private void ReportError(string errMsg)
    {
        this.ReportLexerError(errMsg);

        // the ERROR_TOKEN will raise an error in the parser
        this.CreateAndAddPendingToken(PythonLexer.ERROR_TOKEN, TokenConstants.DefaultChannel, PythonLexerBase.ERR_TXT + errMsg, this.ffgToken);
    }

    public override void Reset()
    {
        this.Init();
        base.Reset();
    }
}

View File

@ -0,0 +1,880 @@
/*
Python grammar
The MIT License (MIT)
Copyright (c) 2021 Robert Einhorn
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
* Project : an ANTLR4 parser grammar by the official PEG grammar
* https://github.com/RobEin/ANTLR4-parser-for-Python-3.12
* Developed by : Robert Einhorn
*
*/
parser grammar PythonParser; // Python 3.12.1 https://docs.python.org/3.12/reference/grammar.html#full-grammar-specification
options {
tokenVocab=PythonLexer;
superClass=PythonParserBase;
}
// STARTING RULES
// ==============
file_input: statements? EOF;
interactive: statement_newline;
eval: expressions NEWLINE* EOF;
func_type: '(' type_expressions? ')' '->' expression NEWLINE* EOF;
fstring_input: star_expressions;
// GENERAL STATEMENTS
// ==================
statements: statement+;
statement: compound_stmt | simple_stmts;
statement_newline
: compound_stmt NEWLINE
| simple_stmts
| NEWLINE
| EOF;
simple_stmts
: simple_stmt (';' simple_stmt)* ';'? NEWLINE
;
// NOTE: assignment MUST precede expression, else parsing a simple assignment
// will throw a SyntaxError.
simple_stmt
: assignment
| type_alias
| star_expressions
| return_stmt
| import_stmt
| raise_stmt
| 'pass'
| del_stmt
| yield_stmt
| assert_stmt
| 'break'
| 'continue'
| global_stmt
| nonlocal_stmt;
compound_stmt
: function_def
| if_stmt
| class_def
| with_stmt
| for_stmt
| try_stmt
| while_stmt
| match_stmt;
// SIMPLE STATEMENTS
// =================
// NOTE: annotated_rhs may start with 'yield'; yield_expr must start with 'yield'
assignment
: NAME ':' expression ('=' annotated_rhs )?
| ('(' single_target ')'
| single_subscript_attribute_target) ':' expression ('=' annotated_rhs )?
| (star_targets '=' )+ (yield_expr | star_expressions) TYPE_COMMENT?
| single_target augassign (yield_expr | star_expressions);
annotated_rhs: yield_expr | star_expressions;
augassign
: '+='
| '-='
| '*='
| '@='
| '/='
| '%='
| '&='
| '|='
| '^='
| '<<='
| '>>='
| '**='
| '//=';
return_stmt
: 'return' star_expressions?;
raise_stmt
: 'raise' (expression ('from' expression )?)?
;
global_stmt: 'global' NAME (',' NAME)*;
nonlocal_stmt: 'nonlocal' NAME (',' NAME)*;
del_stmt
: 'del' del_targets;
yield_stmt: yield_expr;
assert_stmt: 'assert' expression (',' expression )?;
import_stmt
: import_name
| import_from;
// Import statements
// -----------------
import_name: 'import' dotted_as_names;
// note below: the ('.' | '...') is necessary because '...' is tokenized as ELLIPSIS
import_from
: 'from' ('.' | '...')* dotted_name 'import' import_from_targets
| 'from' ('.' | '...')+ 'import' import_from_targets;
import_from_targets
: '(' import_from_as_names ','? ')'
| import_from_as_names
| '*';
import_from_as_names
: import_from_as_name (',' import_from_as_name)*;
import_from_as_name
: NAME ('as' NAME )?;
dotted_as_names
: dotted_as_name (',' dotted_as_name)*;
dotted_as_name
: dotted_name ('as' NAME )?;
dotted_name
: dotted_name '.' NAME
| NAME;
// COMPOUND STATEMENTS
// ===================
// Common elements
// ---------------
block
: NEWLINE INDENT statements DEDENT
| simple_stmts;
decorators: ('@' named_expression NEWLINE )+;
// Class definitions
// -----------------
class_def
: decorators class_def_raw
| class_def_raw;
class_def_raw
: 'class' NAME type_params? ('(' arguments? ')' )? ':' block;
// Function definitions
// --------------------
function_def
: decorators function_def_raw
| function_def_raw;
function_def_raw
: 'def' NAME type_params? '(' params? ')' ('->' expression )? ':' func_type_comment? block
| ASYNC 'def' NAME type_params? '(' params? ')' ('->' expression )? ':' func_type_comment? block;
// Function parameters
// -------------------
params
: parameters;
parameters
: slash_no_default param_no_default* param_with_default* star_etc?
| slash_with_default param_with_default* star_etc?
| param_no_default+ param_with_default* star_etc?
| param_with_default+ star_etc?
| star_etc;
// Some duplication here because we can't write (',' | {isCurrentTokenType(RPAR)}?),
// which is because we don't support empty alternatives (yet).
slash_no_default
: param_no_default+ '/' ','?
;
slash_with_default
: param_no_default* param_with_default+ '/' ','?
;
star_etc
: '*' param_no_default param_maybe_default* kwds?
| '*' param_no_default_star_annotation param_maybe_default* kwds?
| '*' ',' param_maybe_default+ kwds?
| kwds;
kwds
: '**' param_no_default;
// One parameter. This *includes* a following comma and type comment.
//
// There are three styles:
// - No default_assignment
// - With default_assignment
// - Maybe with default_assignment
//
// There are two alternative forms of each, to deal with type comments:
// - Ends in a comma followed by an optional type comment
// - No comma, optional type comment, must be followed by close paren
// The latter form is for a final parameter without trailing comma.
//
param_no_default
: param ','? TYPE_COMMENT?
;
param_no_default_star_annotation
: param_star_annotation ','? TYPE_COMMENT?
;
param_with_default
: param default_assignment ','? TYPE_COMMENT?
;
param_maybe_default
: param default_assignment? ','? TYPE_COMMENT?
;
param: NAME annotation?;
param_star_annotation: NAME star_annotation;
annotation: ':' expression;
star_annotation: ':' star_expression;
default_assignment: '=' expression;
// If statement
// ------------
if_stmt
: 'if' named_expression ':' block (elif_stmt | else_block?)
;
elif_stmt
: 'elif' named_expression ':' block (elif_stmt | else_block?)
;
else_block
: 'else' ':' block;
// While statement
// ---------------
while_stmt
: 'while' named_expression ':' block else_block?;
// For statement
// -------------
for_stmt
: ASYNC? 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?
;
// With statement
// --------------
with_stmt
: ASYNC? 'with' ( '(' with_item (',' with_item)* ','? ')' ':'
| with_item (',' with_item)* ':' TYPE_COMMENT?
) block
;
with_item
: expression ('as' star_target)?
;
// Try statement
// -------------
try_stmt
: 'try' ':' block finally_block
| 'try' ':' block except_block+ else_block? finally_block?
| 'try' ':' block except_star_block+ else_block? finally_block?;
// Except statement
// ----------------
except_block
: 'except' (expression ('as' NAME )?)? ':' block
;
except_star_block
: 'except' '*' expression ('as' NAME )? ':' block;
finally_block
: 'finally' ':' block;
// Match statement
// ---------------
match_stmt
: soft_kw_match subject_expr ':' NEWLINE INDENT case_block+ DEDENT;
subject_expr
: star_named_expression ',' star_named_expressions?
| named_expression;
case_block
: soft_kw_case patterns guard? ':' block;
guard: 'if' named_expression;
patterns
: open_sequence_pattern
| pattern;
pattern
: as_pattern
| or_pattern;
as_pattern
: or_pattern 'as' pattern_capture_target;
or_pattern
: closed_pattern ('|' closed_pattern)*;
closed_pattern
: literal_pattern
| capture_pattern
| wildcard_pattern
| value_pattern
| group_pattern
| sequence_pattern
| mapping_pattern
| class_pattern;
// Literal patterns are used for equality and identity constraints
literal_pattern
: signed_number
| complex_number
| strings
| 'None'
| 'True'
| 'False';
// Literal expressions are used to restrict permitted mapping pattern keys
literal_expr
: signed_number
| complex_number
| strings
| 'None'
| 'True'
| 'False';
complex_number
: signed_real_number ('+' | '-') imaginary_number
;
signed_number
: '-'? NUMBER
;
signed_real_number
: '-'? real_number
;
real_number
: NUMBER;
imaginary_number
: NUMBER;
capture_pattern
: pattern_capture_target;
pattern_capture_target
: soft_kw__not__wildcard;
wildcard_pattern
: soft_kw_wildcard;
value_pattern
: attr;
attr
: NAME ('.' NAME)+
;
name_or_attr
: NAME ('.' NAME)*
;
group_pattern
: '(' pattern ')';
sequence_pattern
: '[' maybe_sequence_pattern? ']'
| '(' open_sequence_pattern? ')';
open_sequence_pattern
: maybe_star_pattern ',' maybe_sequence_pattern?;
maybe_sequence_pattern
: maybe_star_pattern (',' maybe_star_pattern)* ','?;
maybe_star_pattern
: star_pattern
| pattern;
star_pattern
: '*' pattern_capture_target
| '*' wildcard_pattern;
mapping_pattern
: LBRACE RBRACE
| LBRACE double_star_pattern ','? RBRACE
| LBRACE items_pattern (',' double_star_pattern)? ','? RBRACE
;
items_pattern
: key_value_pattern (',' key_value_pattern)*;
key_value_pattern
: (literal_expr | attr) ':' pattern;
double_star_pattern
: '**' pattern_capture_target;
class_pattern
: name_or_attr '(' ((positional_patterns (',' keyword_patterns)? | keyword_patterns) ','?)? ')'
;
positional_patterns
: pattern (',' pattern)*;
keyword_patterns
: keyword_pattern (',' keyword_pattern)*;
keyword_pattern
: NAME '=' pattern;
// Type statement
// ---------------
type_alias
: soft_kw_type NAME type_params? '=' expression;
// Type parameter declaration
// --------------------------
type_params: '[' type_param_seq ']';
type_param_seq: type_param (',' type_param)* ','?;
type_param
: NAME type_param_bound?
| '*' NAME (':' expression)?
| '**' NAME (':' expression)?
;
type_param_bound: ':' expression;
// EXPRESSIONS
// -----------
expressions
: expression (',' expression )* ','?
;
// Conditional (ternary) expression or a lambda.
expression
: disjunction ('if' disjunction 'else' expression)?
| lambdef
;
// 'yield from <expr>' or 'yield' with an optional value list.
yield_expr
: 'yield' ('from' expression | star_expressions?)
;
star_expressions
: star_expression (',' star_expression )* ','?
;
// Iterable unpacking: the '*' operand is limited to bitwise_or precedence.
star_expression
: '*' bitwise_or
| expression;
star_named_expressions: star_named_expression (',' star_named_expression)* ','?;
star_named_expression
: '*' bitwise_or
| named_expression;
// Walrus operator: 'NAME := expression'.
assignment_expression
: NAME ':=' expression;
named_expression
: assignment_expression
| expression;
disjunction
: conjunction ('or' conjunction )*
;
conjunction
: inversion ('and' inversion )*
;
// 'not' nests, so 'not not x' is valid.
inversion
: 'not' inversion
| comparison;
// Comparison operators
// --------------------
// Chained comparisons: one operand followed by any number of (op, operand)
// pairs, e.g. "a < b <= c".
comparison
: bitwise_or compare_op_bitwise_or_pair*
;
compare_op_bitwise_or_pair
: eq_bitwise_or
| noteq_bitwise_or
| lte_bitwise_or
| lt_bitwise_or
| gte_bitwise_or
| gt_bitwise_or
| notin_bitwise_or
| in_bitwise_or
| isnot_bitwise_or
| is_bitwise_or;
eq_bitwise_or: '==' bitwise_or;
noteq_bitwise_or
: ('!=' ) bitwise_or;
lte_bitwise_or: '<=' bitwise_or;
lt_bitwise_or: '<' bitwise_or;
gte_bitwise_or: '>=' bitwise_or;
gt_bitwise_or: '>' bitwise_or;
// Two-token operators 'not in' and 'is not'.
notin_bitwise_or: 'not' 'in' bitwise_or;
in_bitwise_or: 'in' bitwise_or;
isnot_bitwise_or: 'is' 'not' bitwise_or;
is_bitwise_or: 'is' bitwise_or;
// Bitwise operators
// -----------------
// Left recursion gives left associativity; precedence climbs from
// '|' through '^', '&', shifts, sum, term, factor, to power.
bitwise_or
: bitwise_or '|' bitwise_xor
| bitwise_xor;
bitwise_xor
: bitwise_xor '^' bitwise_and
| bitwise_and;
bitwise_and
: bitwise_and '&' shift_expr
| shift_expr;
shift_expr
: shift_expr ('<<' | '>>') sum
| sum
;
// Arithmetic operators
// --------------------
sum
: sum ('+' | '-') term
| term
;
// '@' is the matrix-multiplication operator.
term
: term ('*' | '/' | '//' | '%' | '@') factor
| factor
;
// Unary '+', '-', '~'.
factor
: '+' factor
| '-' factor
| '~' factor
| power;
// '**' is right-associative: its right operand is a factor, so e.g. 2**-1 parses.
power
: await_primary ('**' factor)?
;
// Primary elements
// ----------------
// Primary elements are things like "obj.something.something", "obj[something]", "obj(something)", "obj" ...
await_primary
: AWAIT primary
| primary;
// Left-recursive: trailers (attribute access, genexp argument, call,
// subscript) accumulate left-to-right onto an atom.
primary
: primary ('.' NAME | genexp | '(' arguments? ')' | '[' slices ']')
| atom
;
slices
: slice
| (slice | starred_expression) (',' (slice | starred_expression))* ','?;
// 'start:stop:step' (each part optional) or a plain expression index.
slice
: expression? ':' expression? (':' expression? )?
| named_expression;
atom
: NAME
| 'True'
| 'False'
| 'None'
| strings
| NUMBER
| (tuple | group | genexp)
| (list | listcomp)
| (dict | set | dictcomp | setcomp)
| '...';
// Parenthesized yield or (possibly named) expression.
group
: '(' (yield_expr | named_expression) ')';
// Lambda functions
// ----------------
lambdef
: 'lambda' lambda_params? ':' expression;
lambda_params
: lambda_parameters;
// lambda_parameters etc. duplicates parameters but without annotations
// or type comments, and if there's no comma after a parameter, we expect
// a colon, not a close parenthesis. (For more, see parameters above.)
//
lambda_parameters
: lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc?
| lambda_slash_with_default lambda_param_with_default* lambda_star_etc?
| lambda_param_no_default+ lambda_param_with_default* lambda_star_etc?
| lambda_param_with_default+ lambda_star_etc?
| lambda_star_etc;
// '/' terminates the positional-only parameter group.
lambda_slash_no_default
: lambda_param_no_default+ '/' ','?
;
lambda_slash_with_default
: lambda_param_no_default* lambda_param_with_default+ '/' ','?
;
// '*args' with trailing keyword-only params, bare '*' (keyword-only marker),
// or just '**kwargs'.
lambda_star_etc
: '*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds?
| '*' ',' lambda_param_maybe_default+ lambda_kwds?
| lambda_kwds;
lambda_kwds
: '**' lambda_param_no_default;
lambda_param_no_default
: lambda_param ','?
;
lambda_param_with_default
: lambda_param default_assignment ','?
;
lambda_param_maybe_default
: lambda_param default_assignment? ','?
;
lambda_param: NAME;
// LITERALS
// ========
// Body piece of an f-string: literal text or a '{...}' replacement field.
fstring_middle
: fstring_replacement_field
| FSTRING_MIDDLE;
// '{expr=!conv:spec}': optional '=' (debug form), '!' conversion, ':' format spec.
fstring_replacement_field
: LBRACE (yield_expr | star_expressions) '='? fstring_conversion? fstring_full_format_spec? RBRACE;
fstring_conversion
: '!' NAME;
fstring_full_format_spec
: ':' fstring_format_spec*;
// Format specs may themselves contain nested replacement fields.
fstring_format_spec
: FSTRING_MIDDLE
| fstring_replacement_field;
fstring
: FSTRING_START fstring_middle* FSTRING_END;
string: STRING;
// Adjacent string/f-string literals concatenate implicitly.
strings: (fstring|string)+;
list
: '[' star_named_expressions? ']';
// A parenthesized tuple requires a comma after its first element;
// '()' alone is the empty tuple.
tuple
: '(' (star_named_expression ',' star_named_expressions? )? ')';
set: LBRACE star_named_expressions RBRACE;
// Dicts
// -----
dict
: LBRACE double_starred_kvpairs? RBRACE;
double_starred_kvpairs: double_starred_kvpair (',' double_starred_kvpair)* ','?;
// '**mapping' unpacking or a regular 'key: value' pair.
double_starred_kvpair
: '**' bitwise_or
| kvpair;
kvpair: expression ':' expression;
// Comprehensions & Generators
// ---------------------------
for_if_clauses
: for_if_clause+;
// One 'for ... in ...' clause with any number of trailing 'if' filters;
// a leading ASYNC token makes it an async comprehension clause.
for_if_clause
: ASYNC? 'for' star_targets 'in' disjunction ('if' disjunction )*
;
listcomp
: '[' named_expression for_if_clauses ']';
setcomp
: LBRACE named_expression for_if_clauses RBRACE;
genexp
: '(' ( assignment_expression | expression) for_if_clauses ')';
dictcomp
: LBRACE kvpair for_if_clauses RBRACE;
// FUNCTION CALL ARGUMENTS
// =======================
arguments
: args ','?;
// Positional (possibly '*'-starred or walrus) arguments, optionally
// followed by the keyword-argument section.
args
: (starred_expression | ( assignment_expression | expression)) (',' (starred_expression | ( assignment_expression | expression)))* (',' kwargs )?
| kwargs;
// 'name=value' / '*iterable' entries first, then 'name=value' / '**mapping'.
kwargs
: kwarg_or_starred (',' kwarg_or_starred)* (',' kwarg_or_double_starred (',' kwarg_or_double_starred)*)?
| kwarg_or_double_starred (',' kwarg_or_double_starred)*
;
starred_expression
: '*' expression;
kwarg_or_starred
: NAME '=' expression
| starred_expression;
kwarg_or_double_starred
: NAME '=' expression
| '**' expression;
// ASSIGNMENT TARGETS
// ==================
// Generic targets
// ---------------
// NOTE: star_targets may contain *bitwise_or, targets may not.
star_targets
: star_target (',' star_target )* ','?
;
star_targets_list_seq: star_target (',' star_target)+ ','?;
// Tuple form requires either a bare trailing comma or two-plus elements.
star_targets_tuple_seq
: star_target (',' | (',' star_target )+ ','?)
;
star_target
: '*' (star_target)
| target_with_star_atom;
target_with_star_atom
: t_primary ('.' NAME | '[' slices ']')
| star_atom
;
star_atom
: NAME
| '(' target_with_star_atom ')'
| '(' star_targets_tuple_seq? ')'
| '[' star_targets_list_seq? ']';
// A single assignment target: attribute/subscript, bare name, or parenthesized.
single_target
: single_subscript_attribute_target
| NAME
| '(' single_target ')';
single_subscript_attribute_target
: t_primary ('.' NAME | '[' slices ']')
;
// Like 'primary' but used in target position (left-recursive trailers).
t_primary
: t_primary ('.' NAME | '[' slices ']' | genexp | '(' arguments? ')')
| atom
;
// Targets for del statements
// --------------------------
del_targets: del_target (',' del_target)* ','?;
del_target
: t_primary ('.' NAME | '[' slices ']')
| del_t_atom
;
del_t_atom
: NAME
| '(' del_target ')'
| '(' del_targets? ')'
| '[' del_targets? ']';
// TYPING ELEMENTS
// ---------------
// type_expressions allow */** but ignore them
type_expressions
: expression (',' expression)* (',' ('*' expression (',' '**' expression)? | '**' expression))?
| '*' expression (',' '**' expression)?
| '**' expression
;
func_type_comment
: NEWLINE TYPE_COMMENT // Must be followed by indented block
| TYPE_COMMENT;
// *** Soft Keywords: https://docs.python.org/3.12/reference/lexical_analysis.html#soft-keywords
// Each rule uses a semantic predicate that calls into PythonParserBase to
// inspect the current token's text, so the NAME only matches when spelled
// exactly as the given keyword.
soft_kw_type: {this.isEqualToCurrentTokenText("type")}? NAME;
soft_kw_match: {this.isEqualToCurrentTokenText("match")}? NAME;
soft_kw_case: {this.isEqualToCurrentTokenText("case")}? NAME;
soft_kw_wildcard: {this.isEqualToCurrentTokenText("_")}? NAME;
// Any NAME whose text is not '_' (used by pattern_capture_target).
soft_kw__not__wildcard: {this.isnotEqualToCurrentTokenText("_")}? NAME;
// ========================= END OF THE GRAMMAR ===========================

View File

@ -0,0 +1,21 @@
using Antlr4.Runtime;
namespace MicroForge.Parsing;
/// <summary>
/// Base class for the generated Python parser. Exposes the helpers that the
/// grammar's semantic predicates invoke to recognize soft keywords.
/// </summary>
public abstract class PythonParserBase : Parser
{
    protected PythonParserBase(ITokenStream input) : base(input)
    {
    }

    // https://docs.python.org/3/reference/lexical_analysis.html#soft-keywords
    /// <summary>
    /// True when the parser's current token text equals <paramref name="tokenText"/>.
    /// Called from grammar predicates such as soft_kw_match/soft_kw_type, so the
    /// lower-case method name must be preserved.
    /// </summary>
    public bool isEqualToCurrentTokenText(string tokenText)
        => this.CurrentToken.Text == tokenText;

    /// <summary>
    /// Negation of <see cref="isEqualToCurrentTokenText"/>; mirrors the Python
    /// 'not' logical operator as written in the grammar's predicates.
    /// </summary>
    public bool isnotEqualToCurrentTokenText(string tokenText)
        => !this.isEqualToCurrentTokenText(tokenText);
}

22
MicroForge.sln Normal file
View File

@ -0,0 +1,22 @@

Microsoft Visual Studio Solution File, Format Version 12.00
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MicroForge.CLI", "MicroForge.CLI\MicroForge.CLI.csproj", "{27EFB015-AFC3-4046-8D9A-DD5C5D3B35E0}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MicroForge.Parsing", "MicroForge.Parsing\MicroForge.Parsing.csproj", "{D697CEFD-7CF7-4680-82FC-F84B08F81635}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{27EFB015-AFC3-4046-8D9A-DD5C5D3B35E0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{27EFB015-AFC3-4046-8D9A-DD5C5D3B35E0}.Debug|Any CPU.Build.0 = Debug|Any CPU
{27EFB015-AFC3-4046-8D9A-DD5C5D3B35E0}.Release|Any CPU.ActiveCfg = Release|Any CPU
{27EFB015-AFC3-4046-8D9A-DD5C5D3B35E0}.Release|Any CPU.Build.0 = Release|Any CPU
{D697CEFD-7CF7-4680-82FC-F84B08F81635}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D697CEFD-7CF7-4680-82FC-F84B08F81635}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D697CEFD-7CF7-4680-82FC-F84B08F81635}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D697CEFD-7CF7-4680-82FC-F84B08F81635}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
EndGlobal

46
README.Docker.md Normal file
View File

@ -0,0 +1,46 @@
### Building and running your application
When you're ready, start your application by running:
`docker compose up --build`.
Your application will be available at http://localhost:8080.
### Deploying your application to the cloud
First, build your image, e.g.: `docker build -t myapp .`.
If your cloud uses a different CPU architecture than your development
machine (e.g., you are on a Mac M1 and your cloud provider is amd64),
you'll want to build the image for that platform, e.g.:
`docker build --platform=linux/amd64 -t myapp .`.
Then, push it to your registry, e.g. `docker push myregistry.com/myapp`.
Consult Docker's [getting started](https://docs.docker.com/go/get-started-sharing/)
docs for more detail on building and pushing.
### References
* [Docker's .NET guide](https://docs.docker.com/language/dotnet/)
* The [dotnet-docker](https://github.com/dotnet/dotnet-docker/tree/main/samples)
repository has many relevant samples and docs.
### Dependencies
bash (/usr/bin/bash)
Python 3.10.2 (/usr/bin/python3)
- python3-pip
- python3-venv
pkg-config (sudo apt update && sudo apt install pkg-config)
### Notes
Try running a NuGet restore when ANTLR doesn't generate the Lexer or Parser.
### TODO
- Make entrypoint, i.e. main.py, customizable or fixed?
- Figure out why BashException cannot be caught; could it be due to the differences in scoping?
Because the `Bash` class is static and the services calling `Bash.ExecuteAsync` are in the container.
Maybe this in combination with the async nature of the whole thing?
- Make it so that `Bash.ExecuteAsync` can also run "directly" in the calling terminal.
- This will improve the usability of the "m4g run" command.