author     bozo.kopic <bozo@kopic.xyz>  2021-11-07 15:52:44 +0100
committer  bozo.kopic <bozo@kopic.xyz>  2021-12-18 02:38:50 +0100
commit     0702d13263bf501c1db074ce1544e60b95161210 (patch)
tree       ebca76946cead0ffcc742a64c15dd6f5e79958fa /src_doit
parent     56a75fcb8f5a9e4c05ccec8eb4a3345a115da441 (diff)

major rewrite (v0.3.0)
Diffstat (limited to 'src_doit')
-rw-r--r--  src_doit/__init__.py                    156
-rw-r--r--  src_doit/dist/__init__.py               113
-rw-r--r--  src_doit/dist/container/Dockerfile        6
-rw-r--r--  src_doit/dist/windows/opcut-server.bat    2
-rw-r--r--  src_doit/dist/windows/opcut.bat           2
5 files changed, 279 insertions, 0 deletions
diff --git a/src_doit/__init__.py b/src_doit/__init__.py
new file mode 100644
index 0000000..f1d9a2f
--- /dev/null
+++ b/src_doit/__init__.py
@@ -0,0 +1,156 @@
+from pathlib import Path
+import subprocess
+import tempfile
+
+from hat import json
+from hat.doit import common
+from hat.doit.py import (build_wheel,
+                         run_pytest,
+                         run_flake8)
+from hat.doit.js import run_eslint
+
+from .dist import * # NOQA
+from . import dist
+
+
+__all__ = ['task_clean_all',
+           'task_build',
+           'task_check',
+           'task_test',
+           'task_ui',
+           'task_deps',
+           'task_json_schema_repo',
+           *dist.__all__]
+
+
+build_dir = Path('build')
+src_py_dir = Path('src_py')
+src_js_dir = Path('src_js')
+src_static_dir = Path('src_static')
+pytest_dir = Path('test_pytest')
+docs_dir = Path('docs')
+schemas_dir = Path('schemas')
+node_modules_dir = Path('node_modules')
+
+ui_dir = src_py_dir / 'opcut/ui'
+json_schema_repo_path = src_py_dir / 'opcut/json_schema_repo.json'
+
+
+def task_clean_all():
+ """Clean all"""
+ return {'actions': [(common.rm_rf, [build_dir,
+ ui_dir,
+ json_schema_repo_path])]}
+
+
+def task_build():
+ """Build"""
+
+ def build():
+ build_wheel(
+ src_dir=src_py_dir,
+ dst_dir=build_dir,
+ name='opcut',
+ description='Cutting stock problem optimizer',
+ url='https://github.com/bozokopic/opcut',
+ license=common.License.GPL3,
+ packages=['opcut'],
+ console_scripts=['opcut = opcut.main:main'])
+
+ return {'actions': [build],
+ 'task_dep': ['ui',
+ 'json_schema_repo']}
+
+
+def task_check():
+ """Check"""
+ return {'actions': [(run_flake8, [src_py_dir]),
+ (run_flake8, [pytest_dir]),
+ (run_eslint, [src_js_dir])],
+ 'task_dep': ['deps']}
+
+
+def task_test():
+ """Test"""
+ return {'actions': [(common.mkdir_p, [ui_dir]),
+ lambda args: run_pytest(pytest_dir, *(args or []))],
+ 'pos_arg': 'args',
+ 'task_dep': ['json_schema_repo']}
+
+
+def task_ui():
+ """Build UI"""
+
+ def build(args):
+ args = args or []
+ common.rm_rf(ui_dir)
+ common.cp_r(src_static_dir, ui_dir)
+ common.cp_r(schemas_dir, ui_dir)
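+        # bundle the JS entry point with webpack, rendering the config
+        # template below into a temporary directory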
+        with tempfile.TemporaryDirectory() as tmpdir:
+            tmpdir = Path(tmpdir)
+            config_path = tmpdir / 'webpack.config.js'
+            config_path.write_text(_webpack_conf.format(
+                src_path=(src_js_dir / 'main.js').resolve(),
+                dst_dir=ui_dir.resolve()))
+            subprocess.run([str(node_modules_dir / '.bin/webpack'),
+                            '--config', str(config_path),
+                            *args],
+                           check=True)
+
+    return {'actions': [build],
+            'pos_arg': 'args',
+            'task_dep': ['deps']}
+
+
+def task_deps():
+ """Install dependencies"""
+ return {'actions': ['yarn install --silent']}
+
+
+def task_json_schema_repo():
+ """Generate JSON Schema Repository"""
+ src_paths = [schemas_dir / 'opcut.yaml']
+
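+    # combine the YAML schemas into a single JSON schema repository file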
+    def generate():
+        repo = json.SchemaRepository(*src_paths)
+        data = repo.to_json()
+        json.encode_file(data, json_schema_repo_path, indent=None)
+
+    return {'actions': [generate],
+            'file_dep': src_paths,
+            'targets': [json_schema_repo_path]}
+
+
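+# webpack config template; doubled braces are literal braces escaped for str.format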
+_webpack_conf = r"""
+module.exports = {{
+    mode: 'none',
+    entry: '{src_path}',
+    output: {{
+        filename: 'main.js',
+        path: '{dst_dir}'
+    }},
+    module: {{
+        rules: [
+            {{
+                test: /\.scss$/,
+                use: [
+                    "style-loader",
+                    {{
+                        loader: "css-loader",
+                        options: {{url: false}}
+                    }},
+                    {{
+                        loader: "sass-loader",
+                        options: {{sourceMap: true}}
+                    }}
+                ]
+            }}
+        ]
+    }},
+    watchOptions: {{
+        ignored: /node_modules/
+    }},
+    devtool: 'source-map',
+    stats: 'errors-only'
+}};
+"""
diff --git a/src_doit/dist/__init__.py b/src_doit/dist/__init__.py
new file mode 100644
index 0000000..de8d98c
--- /dev/null
+++ b/src_doit/dist/__init__.py
@@ -0,0 +1,113 @@
+from pathlib import Path
+import subprocess
+import zipfile
+import sys
+
+from hat.doit import common
+
+
+__all__ = ['task_dist',
+           'task_dist_windows',
+           'task_dist_container']
+
+
+package_path = Path(__file__).parent
+
+build_dir = Path('build')
+cache_dir = Path('cache')
+
+build_windows_dir = build_dir / f'opcut-{common.get_version()}-windows'
+build_container_dir = build_dir / f'opcut-{common.get_version()}-container'
+
+win_python_url = 'https://www.python.org/ftp/python/3.9.7/python-3.9.7-embed-amd64.zip' # NOQA
+cache_win_python_path = cache_dir / win_python_url.split('/')[-1]
+
+
+def task_dist():
+ """Build distribution"""
+
+ return {'actions': None,
+ 'task_dep': ['dist_windows',
+ 'dist_container']}
+
+
+def task_dist_windows():
+ """Build windows distribution"""
+
+ def build():
+ common.rm_rf(build_windows_dir)
+ common.mkdir_p(build_windows_dir.parent)
+ common.cp_r(package_path / 'windows', build_windows_dir)
+
+ common.mkdir_p(cache_dir)
+ if not cache_win_python_path.exists():
+ subprocess.run(['curl', '-s',
+ '-o', str(cache_win_python_path),
+ '-L', win_python_url],
+ check=True)
+
+ python_dir = build_windows_dir / 'python'
+ common.mkdir_p(python_dir)
+ with zipfile.ZipFile(str(cache_win_python_path)) as f:
+ f.extractall(str(python_dir))
+
+ python_lib_path = python_dir / 'python39.zip'
+ python_lib_dir = python_dir / 'lib'
+ common.mkdir_p(build_windows_dir / 'python/lib')
+ with zipfile.ZipFile(str(python_lib_path)) as f:
+ f.extractall(str(python_lib_dir))
+ common.rm_rf(python_lib_path)
+
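+        # the ._pth file defines the embedded Python's import path: the bundled
+        # packages dir, the extracted stdlib, the python dir itself and site processing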
+        (python_dir / 'python39._pth').write_text(
+            '..\\packages\n'
+            'lib\n'
+            '.\n'
+            'import site\n'
+        )
+
+        packages_dir = build_windows_dir / 'packages'
+        common.mkdir_p(packages_dir)
+
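+        # install the freshly built wheel and its dependencies as win_amd64
+        # binary wheels into the standalone packages dir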
+        packages = [*(str(i) for i in (build_dir / 'dist').glob('*.whl'))]
+        subprocess.run(['pip', 'install', '-q',
+                        '-t', str(packages_dir),
+                        '--only-binary=:all:',
+                        '--platform', 'win_amd64',
+                        *packages],
+                       check=True)
+
+        zip_path = build_dir / f'{build_windows_dir.name}.zip'
+        common.rm_rf(zip_path)
+        with zipfile.ZipFile(str(zip_path), 'w', zipfile.ZIP_DEFLATED) as f:
+            for i in build_windows_dir.rglob('*'):
+                if i.is_dir():
+                    continue
+                f.write(str(i), str(i.relative_to(build_windows_dir)))
+
+    return {'actions': [build],
+            'task_dep': ['build']}
+
+
+def task_dist_container():
+ """Build container distribution"""
+
+ def build():
+ common.rm_rf(build_container_dir)
+ common.mkdir_p(build_container_dir.parent)
+ common.cp_r(package_path / 'container', build_container_dir)
+
+ for i in (build_dir / 'dist').glob('*.whl'):
+ common.cp_r(i, build_container_dir / i.name)
+
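+        # build the container image from the copied Dockerfile and wheel,
+        # then export it as a tar archive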
+        name = f'opcut:{common.get_version()}'
+        img_path = build_dir / f'{build_container_dir.name}.tar'
+
+        subprocess.run(['podman', 'build', '-q', '-t', name, '.'],
+                       cwd=str(build_container_dir),
+                       check=True)
+
+        subprocess.run(['podman', 'save', '-q', '-o', str(img_path), name],
+                       check=True)
+
+    return {'actions': [build],
+            'task_dep': ['build']}
diff --git a/src_doit/dist/container/Dockerfile b/src_doit/dist/container/Dockerfile
new file mode 100644
index 0000000..32978c1
--- /dev/null
+++ b/src_doit/dist/container/Dockerfile
@@ -0,0 +1,6 @@
+FROM python:3.9-slim-bullseye
+COPY *.whl .
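+# compiler and cairo development headers are needed to build the wheel's native dependencies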
+RUN apt update -qy && \
+    apt install -qy pkg-config gcc libcairo2-dev && \
+    pip install -qq *.whl && \
+    rm *.whl
diff --git a/src_doit/dist/windows/opcut-server.bat b/src_doit/dist/windows/opcut-server.bat
new file mode 100644
index 0000000..25ce4f9
--- /dev/null
+++ b/src_doit/dist/windows/opcut-server.bat
@@ -0,0 +1,2 @@
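+@rem %~dp0 is this script's directory; put the bundled Python first on PATH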
+@set PATH=%~dp0python;%PATH%
+@python -m opcut server %*
diff --git a/src_doit/dist/windows/opcut.bat b/src_doit/dist/windows/opcut.bat
new file mode 100644
index 0000000..68ae30a
--- /dev/null
+++ b/src_doit/dist/windows/opcut.bat
@@ -0,0 +1,2 @@
+@set PATH=%~dp0python;%PATH%
+@python -m opcut %*