Compare commits
29 Commits
Author | SHA1 | Date |
---|---|---|
|
901147ac3c | 2 months ago |
|
7136685517 | 7 months ago |
|
59096eab78 | 7 months ago |
|
cf4f218675 | 7 months ago |
|
115a0bc47f | 11 months ago |
|
83a530b56d | 11 months ago |
|
f7a8e01098 | 11 months ago |
|
a9a8250158 | 11 months ago |
|
c532f161b0 | 11 months ago |
|
ef24325827 | 11 months ago |
|
73745d5e0f | 11 months ago |
|
a0ad36ac93 | 11 months ago |
|
2e8b96381d | 11 months ago |
|
eb7f53e499 | 11 months ago |
|
9a2b979a51 | 11 months ago |
|
f1a7f44fa0 | 11 months ago |
|
ec96c56545 | 11 months ago |
|
51e87a76c2 | 11 months ago |
|
50b1e94d5e | 11 months ago |
|
132823f393 | 11 months ago |
|
706478f5c1 | 11 months ago |
|
6c6e210522 | 11 months ago |
|
41da487b20 | 11 months ago |
|
640781160e | 11 months ago |
|
374b3d3c34 | 11 months ago |
|
c75a3ec50b | 11 months ago |
|
49756a0fe6 | 11 months ago |
|
107c524783 | 11 months ago |
|
c0a07a8735 | 11 months ago |
23 changed files with 246 additions and 522 deletions
@ -1,6 +0,0 @@ |
|||||||
/test_app/ |
|
||||||
bin/ |
|
||||||
build/ |
|
||||||
dist/ |
|
||||||
**.spec |
|
||||||
**__pycache__** |
|
Binary file not shown.
Binary file not shown.
@ -0,0 +1,44 @@ |
|||||||
|
## Generate compile_commands.json for premake projects |
||||||
|
|
||||||
|
This module implements [JSON Compilation Database Format |
||||||
|
Specification](http://clang.llvm.org/docs/JSONCompilationDatabase.html) for |
||||||
|
premake projects. |
||||||
|
|
||||||
|
Install this module somewhere premake can find it, for example: |
||||||
|
|
||||||
|
``` |
||||||
|
git clone https://github.com/tarruda/premake-export-compile-commands export-compile-commands |
||||||
|
``` |
||||||
|
|
||||||
|
Then put this at the top of your system script (e.g. ~/.premake/premake-system.lua): |
||||||
|
|
||||||
|
```lua |
||||||
|
require "export-compile-commands" |
||||||
|
``` |
||||||
|
|
||||||
|
Note that while possible, it is not recommended to put the `require` line in |
||||||
|
project-specific premake configuration because the "export-compile-commands" |
||||||
|
module will need to be installed everywhere your project is built. |
||||||
|
|
||||||
|
After the above steps, the "export-compile-commands" action will be available |
||||||
|
for your projects: |
||||||
|
|
||||||
|
``` |
||||||
|
premake5 export-compile-commands |
||||||
|
``` |
||||||
|
|
||||||
|
The `export-compile-commands` action will generate one json file per |
||||||
|
config/platform combination in each workspace, all under the `compile_commands` |
||||||
|
subdirectory. For example, say you have defined `debug` and `release` |
||||||
|
configurations with `x32` and `x64` platforms, the output will be something |
||||||
|
like: |
||||||
|
|
||||||
|
``` |
||||||
|
Generated WORKSPACE_BUILD_DIR/compile_commands/debug_x32.json... |
||||||
|
Generated WORKSPACE_BUILD_DIR/compile_commands/debug_x64.json... |
||||||
|
Generated WORKSPACE_BUILD_DIR/compile_commands/release_x32.json... |
||||||
|
Generated WORKSPACE_BUILD_DIR/compile_commands/release_x64.json... |
||||||
|
``` |
||||||
|
|
||||||
|
where each file contains the compilation commands for the corresponding |
||||||
|
config/platform combo. |
@ -0,0 +1,4 @@ |
|||||||
|
return { |
||||||
|
'_preload.lua', |
||||||
|
'export-compile-commands.lua', |
||||||
|
} |
@ -0,0 +1,3 @@ |
|||||||
|
return function(cfg) |
||||||
|
return _ACTION == 'export-compile-commands' |
||||||
|
end |
@ -0,0 +1,133 @@ |
|||||||
|
local p = premake |
||||||
|
|
||||||
|
p.modules.export_compile_commands = {} |
||||||
|
local m = p.modules.export_compile_commands |
||||||
|
|
||||||
|
local workspace = p.workspace |
||||||
|
local project = p.project |
||||||
|
|
||||||
|
function m.getToolset(cfg) |
||||||
|
return p.tools[cfg.toolset or 'gcc'] |
||||||
|
end |
||||||
|
|
||||||
|
function m.getIncludeDirs(cfg) |
||||||
|
local flags = {} |
||||||
|
for _, dir in ipairs(cfg.includedirs) do |
||||||
|
table.insert(flags, '-I' .. p.quoted(dir)) |
||||||
|
end |
||||||
|
for _, dir in ipairs(cfg.sysincludedir or {}) do |
||||||
|
table.insert(result, '-isystem ' .. p.quoted(dir)) |
||||||
|
end |
||||||
|
return flags |
||||||
|
end |
||||||
|
|
||||||
|
function m.getCommonFlags(cfg) |
||||||
|
local toolset = m.getToolset(cfg) |
||||||
|
local flags = toolset.getcppflags(cfg) |
||||||
|
flags = table.join(flags, toolset.getdefines(cfg.defines)) |
||||||
|
flags = table.join(flags, toolset.getundefines(cfg.undefines)) |
||||||
|
-- can't use toolset.getincludedirs because some tools that consume |
||||||
|
-- compile_commands.json have problems with relative include paths |
||||||
|
flags = table.join(flags, m.getIncludeDirs(cfg)) |
||||||
|
flags = table.join(flags, toolset.getcflags(cfg)) |
||||||
|
return table.join(flags, cfg.buildoptions) |
||||||
|
end |
||||||
|
|
||||||
|
function m.getObjectPath(prj, cfg, node) |
||||||
|
return path.join(cfg.objdir, path.appendExtension(node.objname, '.o')) |
||||||
|
end |
||||||
|
|
||||||
|
function m.getDependenciesPath(prj, cfg, node) |
||||||
|
return path.join(cfg.objdir, path.appendExtension(node.objname, '.d')) |
||||||
|
end |
||||||
|
|
||||||
|
function m.getFileFlags(prj, cfg, node) |
||||||
|
return table.join(m.getCommonFlags(cfg), { |
||||||
|
'-o', m.getObjectPath(prj, cfg, node), |
||||||
|
'-MF', m.getDependenciesPath(prj, cfg, node), |
||||||
|
'-c', node.abspath |
||||||
|
}) |
||||||
|
end |
||||||
|
|
||||||
|
function m.generateCompileCommand(prj, cfg, node) |
||||||
|
return { |
||||||
|
directory = prj.location, |
||||||
|
file = node.abspath, |
||||||
|
command = 'cc '.. table.concat(m.getFileFlags(prj, cfg, node), ' ') |
||||||
|
} |
||||||
|
end |
||||||
|
|
||||||
|
function m.includeFile(prj, node, depth) |
||||||
|
return path.iscppfile(node.abspath) |
||||||
|
end |
||||||
|
|
||||||
|
function m.getConfig(prj) |
||||||
|
if _OPTIONS['export-compile-commands-config'] then |
||||||
|
return project.getconfig(prj, _OPTIONS['export-compile-commands-config'], |
||||||
|
_OPTIONS['export-compile-commands-platform']) |
||||||
|
end |
||||||
|
for cfg in project.eachconfig(prj) do |
||||||
|
-- just use the first configuration which is usually "Debug" |
||||||
|
return cfg |
||||||
|
end |
||||||
|
end |
||||||
|
|
||||||
|
function m.getProjectCommands(prj, cfg) |
||||||
|
local tr = project.getsourcetree(prj) |
||||||
|
local cmds = {} |
||||||
|
p.tree.traverse(tr, { |
||||||
|
onleaf = function(node, depth) |
||||||
|
if not m.includeFile(prj, node, depth) then |
||||||
|
return |
||||||
|
end |
||||||
|
table.insert(cmds, m.generateCompileCommand(prj, cfg, node)) |
||||||
|
end |
||||||
|
}) |
||||||
|
return cmds |
||||||
|
end |
||||||
|
|
||||||
|
local function execute() |
||||||
|
for wks in p.global.eachWorkspace() do |
||||||
|
local cfgCmds = {} |
||||||
|
for prj in workspace.eachproject(wks) do |
||||||
|
for cfg in project.eachconfig(prj) do |
||||||
|
local cfgKey = string.format('%s', cfg.shortname) |
||||||
|
if not cfgCmds[cfgKey] then |
||||||
|
cfgCmds[cfgKey] = {} |
||||||
|
end |
||||||
|
cfgCmds[cfgKey] = table.join(cfgCmds[cfgKey], m.getProjectCommands(prj, cfg)) |
||||||
|
end |
||||||
|
end |
||||||
|
for cfgKey,cmds in pairs(cfgCmds) do |
||||||
|
local outfile = string.format('compile_commands/%s.json', cfgKey) |
||||||
|
p.generate(wks, outfile, function(wks) |
||||||
|
p.w('[') |
||||||
|
for i = 1, #cmds do |
||||||
|
local item = cmds[i] |
||||||
|
local command = string.format([[ |
||||||
|
{ |
||||||
|
"directory": "%s", |
||||||
|
"file": "%s", |
||||||
|
"command": "%s" |
||||||
|
}]], |
||||||
|
item.directory, |
||||||
|
item.file, |
||||||
|
item.command:gsub('\\', '\\\\'):gsub('"', '\\"')) |
||||||
|
if i > 1 then |
||||||
|
p.w(',') |
||||||
|
end |
||||||
|
p.w(command) |
||||||
|
end |
||||||
|
p.w(']') |
||||||
|
end) |
||||||
|
end |
||||||
|
end |
||||||
|
end |
||||||
|
|
||||||
|
newaction { |
||||||
|
trigger = 'export-compile-commands', |
||||||
|
description = 'Export compiler commands in JSON Compilation Database Format', |
||||||
|
execute = execute |
||||||
|
} |
||||||
|
|
||||||
|
return m |
Binary file not shown.
Binary file not shown.
@ -0,0 +1,11 @@ |
|||||||
|
echo "Searching path in env PATH" |
||||||
|
$PATH = [Environment]::GetEnvironmentVariable("PATH", "Machine") |
||||||
|
$bakasable_path = "C:\Program Files\bakasable" |
||||||
|
if( $PATH -notlike "*"+$bakasable_path+"*" ){ |
||||||
|
echo "Path not found in env PATH" |
||||||
|
echo "Adding path" |
||||||
|
[Environment]::SetEnvironmentVariable("PATH", "$PATH;$bakasable_path", "Machine") |
||||||
|
} |
||||||
|
else { |
||||||
|
echo "Path already added" |
||||||
|
} |
@ -1 +0,0 @@ |
|||||||
pyinstaller src/main.py --onefile --distpath ./bin -n bakasable |
|
@ -0,0 +1,16 @@ |
|||||||
|
@echo off |
||||||
|
|
||||||
|
cd %~dp0 |
||||||
|
|
||||||
|
echo Installing bakasable |
||||||
|
|
||||||
|
md "C:\Program Files\bakasable" |
||||||
|
|
||||||
|
md "C:\Program Files\bakasable\cache" |
||||||
|
|
||||||
|
xcopy /y .\bin\windows\bakasable.exe "C:\Program Files\bakasable" |
||||||
|
xcopy /y .\bin\windows\premake5.exe "C:\Program Files\bakasable" |
||||||
|
|
||||||
|
powershell .\path.ps1 |
||||||
|
|
||||||
|
pause |
@ -0,0 +1,34 @@ |
|||||||
|
cd $(dirname "$0") |
||||||
|
|
||||||
|
handle_error() { |
||||||
|
echo "An error occurred on line $1" |
||||||
|
rm -rf ~/.bakasable |
||||||
|
cd $(pwd) |
||||||
|
exit 1 |
||||||
|
} |
||||||
|
trap 'handle_error $LINENO' ERR |
||||||
|
|
||||||
|
echo Installing bakasable |
||||||
|
mkdir -m 777 ~/.bakasable |
||||||
|
mkdir ~/.bakasable/cache |
||||||
|
cp -f ./bin/linux/bakasable ~/.bakasable/ |
||||||
|
|
||||||
|
if [ ! $(which premake5) ]; then |
||||||
|
echo Installing premake |
||||||
|
cp -f ./bin/linux/premake5 ~/.bakasable/ |
||||||
|
chmod +x ~/.bakasable/premake5 |
||||||
|
echo Installing export-compile-commands module |
||||||
|
cp -rf ./bin/vendor/export-compile-commands ~/.bakasable/ |
||||||
|
echo 'require "export-compile-commands"' >> ~/.bakasable/premake-system.lua |
||||||
|
fi |
||||||
|
|
||||||
|
echo Searching path in env PATH |
||||||
|
if [ ! $(which bakasable) ]; then |
||||||
|
echo Path not found in env PATH |
||||||
|
echo Adding path |
||||||
|
export PATH=\$PATH:~/.bakasable |
||||||
|
echo "export PATH=\$PATH:~/.bakasable" >> ~/.bashrc |
||||||
|
else |
||||||
|
echo Path already added |
||||||
|
fi |
||||||
|
cd $(pwd) |
@ -1,8 +0,0 @@ |
|||||||
import os |
|
||||||
import sys |
|
||||||
from Log import ShColors |
|
||||||
|
|
||||||
def exec(command): |
|
||||||
print(ShColors.OKGREEN) |
|
||||||
os.system(f"{command}") |
|
||||||
print(ShColors.ENDC) |
|
@ -1,30 +0,0 @@ |
|||||||
class ShColors: |
|
||||||
HEADER = '\033[95m' |
|
||||||
OKBLUE = '\033[94m' |
|
||||||
OKCYAN = '\033[96m' |
|
||||||
OKGREEN = '\033[92m' |
|
||||||
WARNING = '\033[93m' |
|
||||||
FAIL = '\033[91m' |
|
||||||
ENDC = '\033[0m' |
|
||||||
BOLD = '\033[1m' |
|
||||||
UNDERLINE = '\033[4m' |
|
||||||
|
|
||||||
def logo() -> None: |
|
||||||
print( |
|
||||||
f""" |
|
||||||
{ShColors.BOLD}____ _ _ __ _ ____ _ ____ _ _____ |
|
||||||
| __ ) / \\ | |/ / / \\ / ___| / \\ | __ )| | | ____| |
|
||||||
| _ \\ / _ \\ | ' / / _ \\ \\___ \\ / _ \\ | _ \\| | | _| |
|
||||||
| |_) / ___ \\| . \\ / ___ \\ ___) / ___ \\| |_) | |___| |___ |
|
||||||
|____/_/ \\_\\_|\\_\\/_/ \\_\\____/_/ \\_\\____/|_____|_____| |
|
||||||
{ShColors.ENDC}""") |
|
||||||
|
|
||||||
def info(message) -> None: |
|
||||||
print(f"{ShColors.OKGREEN}[INFO] {message}{ShColors.ENDC}") |
|
||||||
|
|
||||||
def warning(message) -> None: |
|
||||||
print(f"{ShColors.WARNING}[WARNING] {message}{ShColors.ENDC}") |
|
||||||
|
|
||||||
def error(message) -> None: |
|
||||||
print(f"{ShColors.FAIL}[ERROR] {ShColors.UNDERLINE}{message}{ShColors.ENDC}") |
|
||||||
exit(1) |
|
@ -1,161 +0,0 @@ |
|||||||
import shutil |
|
||||||
import stat |
|
||||||
import os |
|
||||||
import json |
|
||||||
import Command |
|
||||||
import ToolChaine |
|
||||||
import webbrowser |
|
||||||
import Log |
|
||||||
|
|
||||||
def config(package): |
|
||||||
Log.info(f"Reconfiguring package {package}") |
|
||||||
if os.path.exists(f"./vendor/{package}/dependencies"): |
|
||||||
dep = open(f"./vendor/{package}/dependencies", "r") |
|
||||||
pkg_deps = dep.read() |
|
||||||
dep.close() |
|
||||||
if not os.path.exists("./dependencies.lua"): |
|
||||||
dep = open("./dependencies.lua", "w") |
|
||||||
dep.write("IncludeDirs = {}") |
|
||||||
dep.write("\n" + pkg_deps) |
|
||||||
dep.close() |
|
||||||
else: |
|
||||||
dep = open(f"./dependencies.lua", "a") |
|
||||||
dep.write("\n" + pkg_deps) |
|
||||||
dep.close() |
|
||||||
|
|
||||||
linker = [] |
|
||||||
if os.path.exists(f"./vendor/{package}/package.json"): |
|
||||||
f_conf = open(f"./vendor/{package}/package.json", "r") |
|
||||||
conf = json.loads(f_conf.read()) |
|
||||||
f_conf.close() |
|
||||||
linker.append({ |
|
||||||
"links": conf["links"], |
|
||||||
"includes": conf["includes"] |
|
||||||
}) |
|
||||||
return linker |
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def reconfig(): |
|
||||||
Log.info("Reconfiguring build settings :") |
|
||||||
f_conf = open("./package.json", "r") |
|
||||||
conf = json.loads(f_conf.read()) |
|
||||||
f_conf.close() |
|
||||||
if os.path.exists("./dependencies.lua"): os.remove("./dependencies.lua") |
|
||||||
if os.path.exists("./app/linker.lua"): os.remove("./app/linker.lua") |
|
||||||
linkers = [] |
|
||||||
for package in os.listdir("./vendor"): |
|
||||||
linkers += config(package) |
|
||||||
links = "\nlinks\n{\n" |
|
||||||
includes = '\nincludedirs\n{\n\t"%{prj.location}/src/",\n' |
|
||||||
for linker in linkers: |
|
||||||
if len(linker["links"]): |
|
||||||
for link in linker["links"]: |
|
||||||
if len(link): links += '\t"' + link + '",\n' |
|
||||||
if len(linker["includes"]): |
|
||||||
for include in linker["includes"]: |
|
||||||
if len(include): includes += '\t"%{IncludeDirs.' + include + '}",\n' |
|
||||||
links += "}\n" |
|
||||||
includes += "}\n" |
|
||||||
f_linker= open("./app/linker.lua", "w") |
|
||||||
f_linker.write(includes + links) |
|
||||||
f_linker.close() |
|
||||||
|
|
||||||
def install(author, package, repo) -> None: |
|
||||||
Log.info(f"Installing package {package}") |
|
||||||
if os.path.exists(f"./vendor/{package}"): |
|
||||||
Log.warning(f"Package {package} already added") |
|
||||||
if os.path.exists(f"./vendor/{package}/package.json"): |
|
||||||
conf = open(f"./vendor/{package}/package.json", "r").read() |
|
||||||
conf = json.loads(conf) |
|
||||||
if len(conf["packages"]) > 0: |
|
||||||
for pkg in conf["packages"]: |
|
||||||
install(pkg["author"], pkg["name"], pkg["repo"] if "repo" in pkg else "git.anulax.ch") |
|
||||||
return |
|
||||||
|
|
||||||
if not ToolChaine.tool_exist("git"): |
|
||||||
Log.error("Tool missing git") |
|
||||||
|
|
||||||
Command.exec(f"git clone --depth 5 https://{repo}/{author}/{package} ./vendor/{package}") |
|
||||||
if os.path.exists(f"./vendor/{package}/package.json"): |
|
||||||
conf = open(f"./vendor/{package}/package.json", "r").read() |
|
||||||
conf = json.loads(conf) |
|
||||||
if len(conf["packages"]) > 0: |
|
||||||
for pkg in conf["packages"]: |
|
||||||
install(pkg["author"], pkg["name"], pkg["repo"] if "repo" in pkg else "git.anulax.ch") |
|
||||||
|
|
||||||
def add(author, package) -> None: |
|
||||||
f_conf = open("./package.json", "r") |
|
||||||
conf = json.loads(f_conf.read()) |
|
||||||
f_conf.close() |
|
||||||
if package in conf["packages"]: Log.error("Package already added") |
|
||||||
f_conf = open("./package.json", "w") |
|
||||||
conf["packages"].append({ "author": author, "name": package}) |
|
||||||
f_conf.write(json.dumps(conf, indent=4)) |
|
||||||
f_conf.close() |
|
||||||
|
|
||||||
install(author, package) |
|
||||||
reconfig() |
|
||||||
|
|
||||||
def update(package) -> None: |
|
||||||
Log.info(f"Updating package {package}") |
|
||||||
if not os.path.exists(f"./vendor/{package}"): Log.error("Package not found") |
|
||||||
os.chdir(f"./vendor/{package}") |
|
||||||
Command.exec("git pull") |
|
||||||
os.chdir("../../") |
|
||||||
reconfig() |
|
||||||
|
|
||||||
def save(package, message, paths) -> None: |
|
||||||
Log.info(f"Saving package {package}") |
|
||||||
if not os.path.exists(f"./vendor/{package}"): Log.error("Package not found") |
|
||||||
adds = "" |
|
||||||
if paths is not None and len(paths): |
|
||||||
for path in paths: |
|
||||||
adds += path + " " |
|
||||||
else: adds = "." |
|
||||||
Log.info(f"Paths to archive : {adds}") |
|
||||||
os.chdir(f"./vendor/{package}") |
|
||||||
Command.exec("git status") |
|
||||||
Command.exec(f"git add {adds}") |
|
||||||
Command.exec(f'git commit -m "{message}"') |
|
||||||
Command.exec("git push") |
|
||||||
|
|
||||||
def remove(package) -> None: |
|
||||||
f_conf = open("./package.json", "r") |
|
||||||
conf = json.loads(f_conf.read()) |
|
||||||
f_conf.close() |
|
||||||
conf["packages"] = [pkg for pkg in conf["packages"] if pkg['name'] != package] |
|
||||||
r_remove(package) |
|
||||||
f_conf = open("./package.json", "w") |
|
||||||
f_conf.write(json.dumps(conf, indent=4)) |
|
||||||
f_conf.close() |
|
||||||
reconfig() |
|
||||||
|
|
||||||
def r_remove(package) -> None: |
|
||||||
Log.info(f"Removing package {package}") |
|
||||||
if not os.path.exists(f"./vendor/{package}/") : Log.error(f"Package {package} not the dependencies") |
|
||||||
if os.path.exists(f"./vendor/{package}/package.json") : |
|
||||||
r_pkgs = json.loads(open(f"./vendor/{package}/package.json", "r").read())["packages"] |
|
||||||
for r_pkg in r_pkgs: |
|
||||||
r_remove(r_pkg["name"]) |
|
||||||
os.chmod(f"./vendor/{package}/", stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) |
|
||||||
shutil.rmtree(f"./vendor/{package}/", ignore_errors=True) |
|
||||||
|
|
||||||
def install_root() -> None: |
|
||||||
Log.info("Reinstalling all packages :") |
|
||||||
if not os.path.exists("./package.json"): |
|
||||||
Log.error("No package config file") |
|
||||||
f_conf = open("./package.json", "r") |
|
||||||
conf = json.loads(f_conf.read()) |
|
||||||
f_conf.close() |
|
||||||
for pkg in conf["packages"]: |
|
||||||
install(pkg["author"], pkg["name"], pkg["repo"] if "repo" in pkg else "git.anulax.ch") |
|
||||||
reconfig() |
|
||||||
|
|
||||||
def load_doc(package) -> None: |
|
||||||
if not ToolChaine.tool_exist("doxygen"): |
|
||||||
Log.error("Tool missing doxygen") |
|
||||||
if not os.path.exists(f"./vendor/{package}/Doxyfile"): |
|
||||||
Log.error("Doxygen config file not found") |
|
||||||
Command.exec(f"doxygen ./vendor/{package}") |
|
||||||
webbrowser.open("file://" + os.path.realpath(f"./vendor/{package}/docs/html/index.html")) |
|
@ -1,52 +0,0 @@ |
|||||||
def get() -> str: |
|
||||||
return """project "App" |
|
||||||
kind "ConsoleApp" |
|
||||||
language "C++" |
|
||||||
cppdialect "C++20" |
|
||||||
systemversion "latest" |
|
||||||
|
|
||||||
targetdir("%{wks.location}/bin/" .. outputdir .. "/%{prj.name}") |
|
||||||
objdir("%{wks.location}/bin-int/" .. outputdir .. "/%{prj.name}") |
|
||||||
|
|
||||||
include "linker.lua" |
|
||||||
|
|
||||||
files |
|
||||||
{ |
|
||||||
"src/**.h", |
|
||||||
"src/**.cpp" |
|
||||||
} |
|
||||||
|
|
||||||
filter "configurations:Debug" |
|
||||||
defines |
|
||||||
{ |
|
||||||
"BK_DEBUG", |
|
||||||
"DEBUG" |
|
||||||
} |
|
||||||
runtime "Debug" |
|
||||||
symbols "on" |
|
||||||
|
|
||||||
|
|
||||||
filter "configurations:Release" |
|
||||||
defines |
|
||||||
{ |
|
||||||
"BK_RELEASE", |
|
||||||
"NDEBUG" |
|
||||||
} |
|
||||||
runtime "Release" |
|
||||||
optimize "on" |
|
||||||
|
|
||||||
filter "system:windows" |
|
||||||
buildoptions "/MT" |
|
||||||
staticruntime "on" |
|
||||||
defines |
|
||||||
{ |
|
||||||
"BK_PLATFORM_WINDOWS" |
|
||||||
} |
|
||||||
|
|
||||||
filter "system:linux" |
|
||||||
staticruntime "on" |
|
||||||
defines |
|
||||||
{ |
|
||||||
"BK_PLATFORM_LINUX" |
|
||||||
} |
|
||||||
""" |
|
@ -1,20 +0,0 @@ |
|||||||
def get(name) -> str: |
|
||||||
return '''workspace "''' + name + '''" |
|
||||||
architecture "x64" |
|
||||||
configurations { "Debug", "Release" } |
|
||||||
startproject "App" |
|
||||||
|
|
||||||
flags |
|
||||||
{ |
|
||||||
"MultiProcessorCompile" |
|
||||||
} |
|
||||||
toolset "clang" |
|
||||||
linkgroups "On" |
|
||||||
outputdir = "%{cfg.system}-%{cfg.architecture}-%{cfg.buildcfg}" |
|
||||||
|
|
||||||
include "dependencies.lua" |
|
||||||
|
|
||||||
group "App" |
|
||||||
include "app" |
|
||||||
group "" |
|
||||||
''' |
|
@ -1,2 +0,0 @@ |
|||||||
from . import Wks |
|
||||||
from . import App |
|
@ -1,88 +0,0 @@ |
|||||||
import os |
|
||||||
import json |
|
||||||
import Premake |
|
||||||
import Command |
|
||||||
import Log |
|
||||||
|
|
||||||
class Builder: |
|
||||||
def __init__(self, conf) -> None: |
|
||||||
self.owner = conf["author"] |
|
||||||
self.name = conf["name"] |
|
||||||
self.git_repo = conf["git"] |
|
||||||
|
|
||||||
def __init__(self, name, repo, author = "") -> None: |
|
||||||
self.name = name |
|
||||||
self.owner = author |
|
||||||
self.git_repo = repo |
|
||||||
|
|
||||||
def set_git_repo(self, url) -> None: |
|
||||||
self.git_repo = url |
|
||||||
|
|
||||||
def create(self) -> None: |
|
||||||
self.create_folder() |
|
||||||
self.config() |
|
||||||
|
|
||||||
def config(self) -> None: |
|
||||||
conf = { |
|
||||||
"name": self.name, |
|
||||||
"author": self.owner, |
|
||||||
"git": self.git_repo, |
|
||||||
"packages": [] |
|
||||||
} |
|
||||||
Log.info("Configuring package.json") |
|
||||||
file_conf = open("./package.json", "w") |
|
||||||
file_conf.write(json.dumps(conf, indent=4)) |
|
||||||
file_conf.close() |
|
||||||
wks = open("./premake5.lua", "w") |
|
||||||
wks.write(Premake.Wks.get(self.name)) |
|
||||||
wks.close() |
|
||||||
app = open("./app/premake5.lua", "w") |
|
||||||
app.write(Premake.App.get()) |
|
||||||
app.close() |
|
||||||
git_ign = open("./.gitignore", "w") |
|
||||||
git_ign.write("""/vendor/ |
|
||||||
/bin/ |
|
||||||
/bin-int/ |
|
||||||
/.vscode/ |
|
||||||
/.vs/ |
|
||||||
/docs/ |
|
||||||
**.log |
|
||||||
**.sln |
|
||||||
**.vcxproj* |
|
||||||
**.make |
|
||||||
**Makefile |
|
||||||
**dependencies.lua |
|
||||||
**linker.lua |
|
||||||
""") |
|
||||||
|
|
||||||
def create_folder(self) -> None: |
|
||||||
try: |
|
||||||
os.mkdir("./app") |
|
||||||
os.mkdir("./vendor") |
|
||||||
os.mkdir("./app/src") |
|
||||||
except: Log.error("Directory already exists.") |
|
||||||
create_file("./app/src/app.cpp") |
|
||||||
create_file("./premake5.lua") |
|
||||||
create_file("./app/premake5.lua") |
|
||||||
create_file("./package.json") |
|
||||||
create_file("./.gitignore") |
|
||||||
|
|
||||||
def as_git_repo(self) -> bool: |
|
||||||
return os.path.exists("./.git") |
|
||||||
|
|
||||||
def init_git_repo(self) -> None: |
|
||||||
if self.as_git_repo(): |
|
||||||
Log.warning("Git folder already exists.") |
|
||||||
return |
|
||||||
Command.exec("git init --initial-branch=main") |
|
||||||
Command.exec("git add .") |
|
||||||
Command.exec('git commit -m "Initial commit"') |
|
||||||
if len(self.git_repo) > 0: Command.exec(f'git remote add origin ${self.git_repo}') |
|
||||||
|
|
||||||
|
|
||||||
def create_file(path) -> None: |
|
||||||
file = open(path, "w") |
|
||||||
file.close() |
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1,56 +0,0 @@ |
|||||||
import platform |
|
||||||
import os |
|
||||||
import json |
|
||||||
import Command |
|
||||||
import Log |
|
||||||
|
|
||||||
def tool_exist(name: str) -> bool: |
|
||||||
from shutil import which |
|
||||||
return which(name) is not None |
|
||||||
|
|
||||||
def search_tools(tools: str) -> str: |
|
||||||
none_tools: str = [] |
|
||||||
for tool in tools: |
|
||||||
if not tool_exist(tool): |
|
||||||
none_tools.append(tool) |
|
||||||
return none_tools |
|
||||||
|
|
||||||
def verifie_build_tools() -> None: |
|
||||||
Log.info(f"Verifing build tools") |
|
||||||
none_tools: str = [] |
|
||||||
match platform.system(): |
|
||||||
case "Windows": |
|
||||||
none_tools = search_tools(["git", "premake5"]) |
|
||||||
case "Linux": |
|
||||||
none_tools = search_tools(["git", "g++", "premake5", "make"]) |
|
||||||
case _: |
|
||||||
Log.error("Platform not supported") |
|
||||||
if len(none_tools) > 0: |
|
||||||
Log.error(f"Tools missing {none_tools}") |
|
||||||
|
|
||||||
def run(config) -> None: |
|
||||||
Log.info("Running app") |
|
||||||
Log.info(f"./bin/{platform.system().lower()}-{platform.machine().lower()}-{config}/App/App") |
|
||||||
if not os.path.exists(f"./bin/{platform.system().lower()}-{platform.machine().lower()}-{config}/App/App"): |
|
||||||
Log.error("Executable not found") |
|
||||||
Command.exec(f"chmod +x ./bin/{platform.system().lower()}-{platform.machine().lower()}-{config}/App/App && ./bin/{platform.system().lower()}-{platform.machine().lower()}-{config}/App/App") |
|
||||||
|
|
||||||
|
|
||||||
def build(config) -> None: |
|
||||||
Log.info(f"Starting build with config {config}") |
|
||||||
verifie_build_tools() |
|
||||||
match platform.system(): |
|
||||||
case "Windows": |
|
||||||
Command.exec("premake5 vs2022") |
|
||||||
Log.info("Build with vscode 2022") |
|
||||||
case "Linux": |
|
||||||
Command.exec("premake5 gmake2") |
|
||||||
Command.exec("premake5 export-compile-commands") |
|
||||||
Command.exec(f"mv ./compile_commands/{config.lower()}.json ./compile_commands.json") |
|
||||||
Command.exec("rm -rf ./compile_commands") |
|
||||||
Command.exec(f"make config={config.lower()}") |
|
||||||
case _: |
|
||||||
Log.error("Platform not supported") |
|
||||||
Log.info("Finished build") |
|
||||||
|
|
||||||
|
|
@ -1,98 +0,0 @@ |
|||||||
import os |
|
||||||
import argparse |
|
||||||
import Package |
|
||||||
import ToolChaine |
|
||||||
import Log |
|
||||||
import Project |
|
||||||
|
|
||||||
def init(args) -> None: |
|
||||||
Log.info(f"Initialising new project : {args.name} by {args.owner}") |
|
||||||
Log.info(f"Path to the project : {args.path}") |
|
||||||
Log.info(f"Git repository : {args.repo}") |
|
||||||
project = Project.Builder(args.name, args.repo, args.owner) |
|
||||||
project.create() |
|
||||||
if args.git_init: |
|
||||||
Log.info("Initialising local git folder") |
|
||||||
project.init_git_repo() |
|
||||||
|
|
||||||
def add(args) -> None: |
|
||||||
Package.add(args.author, args.name) |
|
||||||
|
|
||||||
def remove(args) -> None: |
|
||||||
Package.remove(args.name) |
|
||||||
|
|
||||||
def update(args) -> None: |
|
||||||
Package.update(args.name) |
|
||||||
|
|
||||||
def save(args) -> None: |
|
||||||
Package.save(args.name, args.message, args.paths) |
|
||||||
|
|
||||||
def install(args) -> None: |
|
||||||
Package.install_root() |
|
||||||
|
|
||||||
def doc(args) -> None: |
|
||||||
Package.load_doc(args.package) |
|
||||||
|
|
||||||
def build(args) -> None: |
|
||||||
ToolChaine.build(args.config) |
|
||||||
if (args.run): ToolChaine.run(args.config) |
|
||||||
|
|
||||||
def run(args) -> None: |
|
||||||
ToolChaine.run(args.config) |
|
||||||
|
|
||||||
def bakasable() -> None: |
|
||||||
program_parser = argparse.ArgumentParser(prog="bakasable", description="baka developpement enviromment") |
|
||||||
program_parser.add_argument("-p", "--path", type=str, default="./", dest="path", help="path to the project") |
|
||||||
sub_parsers = program_parser.add_subparsers(title="subcommmands", required=True, help="operations on your project") |
|
||||||
|
|
||||||
init_parser = sub_parsers.add_parser("init", help="initialise a new project") |
|
||||||
init_parser.add_argument("-n", "--name", type=str, required=True, dest="name", help="name of your") |
|
||||||
init_parser.add_argument("-r", "--repo", type=str, default="", dest="repo", help="git repository where project is stored") |
|
||||||
init_parser.add_argument("-a", "--author", type=str, default="", dest="owner", help="owner of the project") |
|
||||||
init_parser.add_argument("-g", "--git-init", action="store_const", const=True, default=False, dest="git_init", help="initialise a local git folder") |
|
||||||
init_parser.set_defaults(func=init) |
|
||||||
|
|
||||||
add_parser = sub_parsers.add_parser("add", help="add a module to your project from github") |
|
||||||
add_parser.add_argument("-n", "--name", type=str, required=True, dest="name", help="name of the github repository") |
|
||||||
add_parser.add_argument("-a", "--author", type=str, required=True, dest="author", help="name of the github user") |
|
||||||
add_parser.add_argument("-r", "--repo", type=str, default="git.anulax.ch", dest="repo", help="git repository where project is stored") |
|
||||||
add_parser.set_defaults(func=add) |
|
||||||
|
|
||||||
remove_parser = sub_parsers.add_parser("remove", help="remove a module from your project") |
|
||||||
remove_parser.add_argument("-n", "--name", type=str, required=True, dest="name", help="name of the github repository") |
|
||||||
remove_parser.set_defaults(func=remove) |
|
||||||
|
|
||||||
update_parser = sub_parsers.add_parser("update", help="updates a module from your project") |
|
||||||
update_parser.add_argument("-n", "--name", type=str, required=True, dest="name", help="name of the github repository") |
|
||||||
update_parser.set_defaults(func=update) |
|
||||||
|
|
||||||
save_parser = sub_parsers.add_parser("save", help="saves a module to it's repo") |
|
||||||
save_parser.add_argument("-n", "--name", type=str, required=True, dest="name", help="name of the github repository") |
|
||||||
save_parser.add_argument("-m", "--message", type=str, required=True, dest="message", help="message of the git commit") |
|
||||||
save_parser.add_argument("-p", "--path", type=str, nargs="+", dest="paths", help="files to archive else archive all") |
|
||||||
save_parser.set_defaults(func=save) |
|
||||||
|
|
||||||
install_parser = sub_parsers.add_parser("install", help="installs the dependencies of your project") |
|
||||||
install_parser.set_defaults(func=install) |
|
||||||
|
|
||||||
doc_parser = sub_parsers.add_parser("doc", help="documents a module from your project if it as one") |
|
||||||
doc_parser.add_argument("-n", "--name", type=str, required=True, dest="name", help="name of the github repository") |
|
||||||
doc_parser.set_defaults(func=doc) |
|
||||||
|
|
||||||
build_parser = sub_parsers.add_parser("build", help="") |
|
||||||
build_parser.add_argument("-c", "--config", type=str, dest="config", help="", choices=["Debug", "Release"], default="Debug") |
|
||||||
build_parser.add_argument("-r", "--run", action="store_const", const=True, default=False, dest="run", help="") |
|
||||||
build_parser.set_defaults(func=build) |
|
||||||
|
|
||||||
run_parser = sub_parsers.add_parser("run", help="") |
|
||||||
run_parser.add_argument("-c", "--config", type=str, dest="config", help="", choices=["Debug", "Release"], default="Debug") |
|
||||||
run_parser.set_defaults(func=run) |
|
||||||
|
|
||||||
args = program_parser.parse_args() |
|
||||||
if not os.path.exists(args.path): os.mkdir(args.path) |
|
||||||
os.chdir(args.path) |
|
||||||
args.func(args) |
|
||||||
|
|
||||||
if __name__ == "__main__": |
|
||||||
Log.logo() |
|
||||||
bakasable() |
|
Loading…
Reference in New Issue