From 333dfc65d327cd688aa74fd930d13e3e7a5014c0 Mon Sep 17 00:00:00 2001
From: bozokopic
Date: Mon, 24 Apr 2017 16:33:44 +0200
Subject: doit script

---
 .gitignore                   |   8 +++
 README                       |  54 -------------------
 README.rst                   |  54 +++++++++++++++++++
 dodo.py                      |  14 +++++
 requirements.pip.txt         |   4 ++
 schemas_json/logging.yaml    | 123 +++++++++++++++++++++++++++++++++++++++++++
 src_js/opcut/main.js         |   0
 src_py/opcut/doit/_common.py |  28 ++++++++++
 src_py/opcut/doit/jsopcut.py |  85 ++++++++++++++++++++++++++++++
 src_py/opcut/doit/main.py    |  46 ++++++++++++++++
 src_py/opcut/doit/pyopcut.py | 104 ++++++++++++++++++++++++++++++++++++
 11 files changed, 466 insertions(+), 54 deletions(-)
 create mode 100644 .gitignore
 delete mode 100644 README
 create mode 100644 README.rst
 create mode 100644 dodo.py
 create mode 100644 requirements.pip.txt
 create mode 100644 schemas_json/logging.yaml
 create mode 100644 src_js/opcut/main.js
 create mode 100644 src_py/opcut/doit/_common.py
 create mode 100644 src_py/opcut/doit/jsopcut.py
 create mode 100644 src_py/opcut/doit/main.py
 create mode 100644 src_py/opcut/doit/pyopcut.py

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..f0ef978
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,8 @@
+__pycache__
+/build
+/dist
+/.doit.*
+/yarn.lock
+/node_modules
+/src_py/opcut/json_validator.py
+/src_js/opcut/validator.js
diff --git a/README b/README
deleted file mode 100644
index c0b7955..0000000
--- a/README
+++ /dev/null
@@ -1,54 +0,0 @@
-optcut
-======
-
-`https://opcut.herokuapp.com/`_
-
-`optcut` is cutting stock problem optimizer
-(`https://en.wikipedia.org/wiki/Cutting_stock_problem`_) utilizing multiple
-panels and guillotine cuts (end-to-end cuts). This project includes multiple
-back-end optimizer implementations and single-page web application front-end.
-
-
-Runtime requirements
---------------------
-
-* python >=3.6
-
-Additional required python packages are listed in `requirements.pip.txt`.
-
-
-Development requirements
-------------------------
-
-* nodejs >=7
-* yarn
-
-
-TODO
-----
-
-* global
-
-  * organize build actions using pydoit and webpack
-  * create heroku configuration
-  * create CONTRIBUTING
-
-* optimizer
-
-  * evaluate research papers and proposed algorithms
-  * define optimizer api
-  * implement multiple algorithms in python
-  * evaluate python implementations and do native rewrites is needed
-
-* back-end
-
-  * define json schemas and communication interface between back-end and
-    front-end
-  * basic backend implementation in python
-  * additional functionality (multiple output formats)
-
-* front-end
-
-  * create temporary web page
-  * implement communication with back-end
-  * additional GUI refactoring
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..c0b7955
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,54 @@
+opcut
+======
+
+`https://opcut.herokuapp.com/`_
+
+`opcut` is a cutting stock problem optimizer
+(`https://en.wikipedia.org/wiki/Cutting_stock_problem`_) utilizing multiple
+panels and guillotine cuts (end-to-end cuts). This project includes multiple
+back-end optimizer implementations and a single-page web application front-end.
+
+
+Runtime requirements
+--------------------
+
+* python >=3.6
+
+Additional required python packages are listed in `requirements.pip.txt`.
+
+
+Development requirements
+------------------------
+
+* nodejs >=7
+* yarn
+
+
+TODO
+----
+
+* global
+
+  * organize build actions using pydoit and webpack
+  * create heroku configuration
+  * create CONTRIBUTING
+
+* optimizer
+
+  * evaluate research papers and proposed algorithms
+  * define optimizer api
+  * implement multiple algorithms in python
+  * evaluate python implementations and do native rewrites if needed
+
+* back-end
+
+  * define json schemas and communication interface between back-end and
+    front-end
+  * basic backend implementation in python
+  * additional functionality (multiple output formats)
+
+* front-end
+
+  * create temporary web page
+  * implement communication with back-end
+  * additional GUI refactoring
diff --git a/dodo.py b/dodo.py
new file mode 100644
index 0000000..f1a6eb5
--- /dev/null
+++ b/dodo.py
@@ -0,0 +1,14 @@
+import sys
+import os
+
+sys.path += ['src_py']
+
+os.environ['PYTHONPATH'] = os.pathsep.join(map(
+    os.path.abspath, ['src_py']))
+
+DOIT_CONFIG = {
+    'backend': 'sqlite3',
+    'default_tasks': ['dist_build'],
+    'verbosity': 2}
+
+from opcut.doit.main import *  # NOQA
diff --git a/requirements.pip.txt b/requirements.pip.txt
new file mode 100644
index 0000000..4db3939
--- /dev/null
+++ b/requirements.pip.txt
@@ -0,0 +1,4 @@
+doit
+pyyaml
+jsonschema
+aiohttp
diff --git a/schemas_json/logging.yaml b/schemas_json/logging.yaml
new file mode 100644
index 0000000..0d2f21d
--- /dev/null
+++ b/schemas_json/logging.yaml
@@ -0,0 +1,123 @@
+---
+"$schema": "http://json-schema.org/schema#"
+id: "opcut://logging.yaml#"
+title: Logging
+description: Logging configuration
+type: object
+required:
+    - version
+properties:
+    version:
+        title: Version
+        type: integer
+        default: 1
+    formatters:
+        title: Formatters
+        type: object
+        patternProperties:
+            "(.)+":
+                title: Formatter
+                type: object
+                properties:
+                    format:
+                        title: Format
+                        type: string
+                        default: null
+                    datefmt:
+                        title: Date format
+                        type: string
+                        default: null
+    filters:
+        title: Filters
+        type: object
+        patternProperties:
+            "(.)+":
+                title: Filter
+                type: object
+                properties:
+                    name:
+                        title: Logger name
+                        type: string
+                        default: ''
+    handlers:
+        title: Handlers
+        type: object
+        patternProperties:
+            "(.)+":
+                title: Handler
+                type: object
+                description: |
+                    Additional properties are passed as keyword arguments to
+                    constructor
+                required:
+                    - class
+                properties:
+                    class:
+                        title: Class
+                        type: string
+                    level:
+                        title: Level
+                        type: string
+                    formatter:
+                        title: Formatter
+                        type: string
+                    filters:
+                        title: Filters
+                        type: array
+                        items:
+                            title: Filter id
+                            type: string
+    loggers:
+        title: Loggers
+        type: object
+        patternProperties:
+            "(.)+":
+                title: Logger
+                type: object
+                properties:
+                    level:
+                        title: Level
+                        type: string
+                    propagate:
+                        title: Propagate
+                        type: boolean
+                    filters:
+                        title: Filters
+                        type: array
+                        items:
+                            title: Filter id
+                            type: string
+                    handlers:
+                        title: Handlers
+                        type: array
+                        items:
+                            title: Handler id
+                            type: string
+    root:
+        title: Root logger
+        type: object
+        properties:
+            level:
+                title: Level
+                type: string
+            filters:
+                title: Filters
+                type: array
+                items:
+                    title: Filter id
+                    type: string
+            handlers:
+                title: Handlers
+                type: array
+                items:
+                    title: Handler id
+                    type: string
+    incremental:
+        title: Incremental configuration
+        type: boolean
+        default: false
+    disable_existing_loggers:
+        title: Disable existing loggers
+        type: boolean
+        default: true
+...
diff --git a/src_js/opcut/main.js b/src_js/opcut/main.js
new file mode 100644
index 0000000..e69de29
diff --git a/src_py/opcut/doit/_common.py b/src_py/opcut/doit/_common.py
new file mode 100644
index 0000000..8e33791
--- /dev/null
+++ b/src_py/opcut/doit/_common.py
@@ -0,0 +1,28 @@
+import os
+import shutil
+from pathlib import Path
+
+
+def mkdir_p(*paths):
+    for path in paths:
+        os.makedirs(str(Path(path)), exist_ok=True)
+
+
+def rm_rf(*paths):
+    for path in paths:
+        p = Path(path)
+        if not p.exists():
+            continue
+        if p.is_dir():
+            shutil.rmtree(str(p), ignore_errors=True)
+        else:
+            p.unlink()
+
+
+def cp_r(src, dest):
+    src = Path(src)
+    dest = Path(dest)
+    if src.is_dir():
+        shutil.copytree(str(src), str(dest))
+    else:
+        shutil.copy2(str(src), str(dest))
diff --git a/src_py/opcut/doit/jsopcut.py b/src_py/opcut/doit/jsopcut.py
new file mode 100644
index 0000000..fcfd8dc
--- /dev/null
+++ b/src_py/opcut/doit/jsopcut.py
@@ -0,0 +1,85 @@
+import json
+import yaml
+import subprocess
+from pathlib import Path
+
+from opcut.doit import _common
+
+
+__all__ = ['task_jsopcut_clean', 'task_jsopcut_install_deps',
+           'task_jsopcut_remove_deps', 'task_jsopcut_gen',
+           'task_jsopcut_gen_validator', 'task_jsopcut_build',
+           'task_jsopcut_watch']
+
+
+def task_jsopcut_clean():
+    """JsOpcut - clean"""
+
+    return {'actions': [(_common.rm_rf, ['build/jsopcut',
+                                         'src_js/opcut/validator.js'])]}
+
+
+def task_jsopcut_install_deps():
+    """JsOpcut - install dependencies"""
+
+    def patch():
+        subprocess.Popen(['patch', '-r', '/dev/null', '--forward', '-p0',
+                          '-i', 'node_modules.patch'],
+                         stdout=subprocess.DEVNULL,
+                         stderr=subprocess.DEVNULL).wait()
+
+    return {'actions': ['yarn install',
+                        patch]}
+
+
+def task_jsopcut_remove_deps():
+    """JsOpcut - remove dependencies"""
+
+    return {'actions': [(_common.rm_rf, ['node_modules', 'yarn.lock'])]}
+
+
+def task_jsopcut_gen():
+    """JsOpcut - generate additional JavaScript modules"""
+
+    return {'actions': None,
+            'task_dep': ['jsopcut_gen_validator']}
+
+
+def task_jsopcut_gen_validator():
+    """JsOpcut - generate json validator"""
+
+    schema_files = list(Path('schemas_json').glob('**/*.yaml'))
+    output_file = Path('src_js/opcut/validator.js')
+
+    def parse_schemas():
+        for schema_file in schema_files:
+            with open(schema_file, encoding='utf-8') as f:
+                yield yaml.safe_load(f)
+
+    def generate_output():
+        schemas_json = json.dumps(list(parse_schemas()), indent=4)
+        with open(output_file, 'w', encoding='utf-8') as f:
+            f.write(
+                'import tv4 from "tv4";\n\n\n' +
+                schemas_json + '.forEach(i => tv4.addSchema(i.id, i));\n\n\n' +
+                'export function validate(data, schemaId) {\n' +
+                '    return tv4.validate(data, tv4.getSchema(schemaId));\n' +
+                '}\n')
+
+    return {'actions': [generate_output],
+            'file_dep': schema_files,
+            'targets': [output_file]}
+
+
+def task_jsopcut_build():
+    """JsOpcut - build"""
+
+    return {'actions': ['yarn run build'],
+            'task_dep': ['jsopcut_install_deps', 'jsopcut_gen']}
+
+
+def task_jsopcut_watch():
+    """JsOpcut - build on change"""
+
+    return {'actions': ['yarn run watch'],
+            'task_dep': ['jsopcut_install_deps', 'jsopcut_gen']}
diff --git a/src_py/opcut/doit/main.py b/src_py/opcut/doit/main.py
new file mode 100644
index 0000000..92e4fc9
--- /dev/null
+++ b/src_py/opcut/doit/main.py
@@ -0,0 +1,46 @@
+
+from opcut.doit import _common
+
+import opcut.doit.pyopcut
+import opcut.doit.jsopcut
+from opcut.doit.pyopcut import *  # NOQA
+from opcut.doit.jsopcut import *  # NOQA
+
+
+__all__ = (['task_clean_all', 'task_gen_all', 'task_dist_build',
+            'task_dist_clean'] +
+           opcut.doit.pyopcut.__all__ +
+           opcut.doit.jsopcut.__all__)
+
+
+def task_clean_all():
+    """Clean all"""
+
+    return {'actions': [(_common.rm_rf, ['build', 'dist'])],
+            'task_dep': ['pyopcut_clean',
+                         'jsopcut_clean',
+                         'dist_clean']}
+
+
+def task_gen_all():
+    """Generate all"""
+
+    return {'actions': None,
+            'task_dep': ['pyopcut_gen',
+                         'jsopcut_gen']}
+
+
+def task_dist_clean():
+    """Distribution - clean"""
+
+    return {'actions': [(_common.rm_rf, ['dist'])]}
+
+
+def task_dist_build():
+    """Distribution - build (DEFAULT)"""
+
+    return {'actions': [(_common.mkdir_p, ['dist'])],
+            'task_dep': [
+                'gen_all',
+                'pyopcut_build',
+                'jsopcut_build']}
diff --git a/src_py/opcut/doit/pyopcut.py b/src_py/opcut/doit/pyopcut.py
new file mode 100644
index 0000000..8582f95
--- /dev/null
+++ b/src_py/opcut/doit/pyopcut.py
@@ -0,0 +1,104 @@
+import py_compile
+import os
+import yaml
+from pathlib import Path
+
+from opcut.doit import _common
+
+
+__all__ = ['task_pyopcut_clean', 'task_pyopcut_build', 'task_pyopcut_gen',
+           'task_pyopcut_gen_json_validator']
+
+
+def task_pyopcut_clean():
+    """PyOpcut - clean"""
+
+    return {'actions': [(_common.rm_rf, ['build/pyopcut',
+                                         'src_py/opcut/json_validator.py'])]}
+
+
+def task_pyopcut_build():
+    """PyOpcut - build"""
+
+    generated_files = {Path('src_py/opcut/json_validator.py')}
+
+    def compile(src_path, dst_path):
+        _common.mkdir_p(dst_path.parent)
+        if src_path.suffix == '.py':
+            py_compile.compile(src_path, dst_path, doraise=True)
+        else:
+            _common.cp_r(src_path, dst_path)
+
+    def create_subtask(src_path):
+        dst_path = Path('build/pyopcut') / src_path.relative_to('src_py')
+        if dst_path.suffix == '.py':
+            dst_path = dst_path.with_suffix('.pyc')
+        return {'name': str(src_path),
+                'actions': [(compile, [src_path, dst_path])],
+                'file_dep': [src_path],
+                'targets': [dst_path]}
+
+    for src_path in generated_files:
+        yield create_subtask(src_path)
+
+    for dirpath, dirnames, filenames in os.walk('src_py'):
+        if '__pycache__' in dirnames:
+            dirnames.remove('__pycache__')
+        for i in filenames:
+            src_path = Path(dirpath) / i
+            if src_path not in generated_files:
+                yield create_subtask(src_path)
+
+
+def task_pyopcut_gen():
+    """PyOpcut - generate additional python modules"""
+
+    return {'actions': None,
+            'task_dep': ['pyopcut_gen_json_validator']}
+
+
+def task_pyopcut_gen_json_validator():
+    """PyOpcut - generate json validator"""
+
+    schema_files = list(Path('schemas_json').glob('**/*.yaml'))
+    output_file = Path('src_py/opcut/json_validator.py')
+
+    def parse_schemas():
+        schemas = {}
+        for schema_file in schema_files:
+            with open(schema_file, encoding='utf-8') as f:
+                data = yaml.safe_load(f)
+                if data['id'] in schemas:
+                    raise Exception("duplicate schema id " + data['id'])
+                schemas[data['id']] = data
+        return schemas
+
+    def generate_output():
+        schemas = parse_schemas()
+        with open(output_file, 'w', encoding='utf-8') as f:
+            f.write(
+                '# pylint: skip-file\n'
+                'import jsonschema\n\n\n'
+                '_schemas = {schemas}  # NOQA\n\n\n'
+                'def validate(data, schema_id):\n'
+                '    """ Validate data with JSON schema\n\n'
+                '    Args:\n'
+                '        data: validated data\n'
+                '        schema_id (str): JSON schema identificator\n\n'
+                '    Raises:\n'
+                '        Exception: validation fails\n\n'
+                '    """\n'
+                '    base_uri = schema_id.split("#")[0] + "#"\n'
+                '    resolver = jsonschema.RefResolver(\n'
+                '        base_uri=base_uri,\n'
+                '        referrer=_schemas[base_uri],\n'
+                '        store=_schemas,\n'
+                '        cache_remote=False)\n'
+                '    jsonschema.validate(\n'
+                '        instance=data,\n'
+                '        schema=resolver.resolve(schema_id)[1],\n'
+                '        resolver=resolver)\n'.format(schemas=schemas))
+
+    return {'actions': [generate_output],
+            'file_dep': schema_files,
+            'targets': [output_file]}
--
cgit v1.2.3-70-g09d2
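
Usage notes (editorial addition, not part of the commit; the examples below are
sketches inferred from the code in the patch, and the sample data is
hypothetical):

With dodo.py at the repository root, pydoit picks up the tasks exported by
opcut.doit.main; DOIT_CONFIG names 'dist_build' as the default task, so a bare
`doit` invocation runs dist_build and its dependencies, and any task can also
be run by name:

    $ doit              # runs dist_build (default_tasks in DOIT_CONFIG)
    $ doit list         # show the available tasks
    $ doit clean_all    # remove build/, dist/ and the generated modules

After `doit pyopcut_gen_json_validator` has written
src_py/opcut/json_validator.py (and with src_py on PYTHONPATH), the generated
module should be usable roughly as follows; the schema id is the `id` value
declared in schemas_json/logging.yaml:

    # sketch: validate a logging configuration against the bundled schema
    from opcut import json_validator

    logging_conf = {'version': 1,
                    'root': {'level': 'INFO',
                             'handlers': []}}

    # raises jsonschema's ValidationError if the data violates the schema
    json_validator.validate(logging_conf, 'opcut://logging.yaml#')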