aboutsummaryrefslogtreecommitdiff
path: root/src_doit/__init__.py
blob: 6773485e32f88debb5f94f67576fe3fd850dc762 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
from pathlib import Path

from hat import json
from hat.doit import common
from hat.doit.py import (build_wheel,
                         run_flake8)


# Public doit tasks exported by this module.
__all__ = ['task_clean_all',
           'task_wheel',
           'task_check',
           'task_json_schema_repo']


# Output directory for build artifacts (wheel).
build_dir = Path('build')
# Root directory of the Python package sources.
src_py_dir = Path('src_py')
# Directory containing the YAML JSON Schema definitions.
schemas_json_dir = Path('schemas_json')

# Generated schema repository file, written inside the package so it is
# bundled into the wheel.
json_schema_repo_path = src_py_dir / 'hatter/json_schema_repo.json'


def task_clean_all():
    """Remove all generated artifacts (build dir and schema repository)."""
    targets = [build_dir,
               json_schema_repo_path]
    return {'actions': [(common.rm_rf, targets)]}


def task_wheel():
    """Build the hatter wheel package.

    Depends on the ``json_schema_repo`` task so the generated schema
    repository is present before packaging.
    """

    def build_action():
        # Package metadata is passed explicitly; sources are taken from
        # src_py_dir and the wheel is written into build_dir.
        build_wheel(src_dir=src_py_dir,
                    dst_dir=build_dir,
                    name='hatter',
                    description='Continuous integration server/executor',
                    url='https://github.com/bozokopic/hatter',
                    license=common.License.GPL3,
                    packages=['hatter'],
                    console_scripts=['hatter = hatter.main:main'])

    return {'actions': [build_action],
            'task_dep': ['json_schema_repo']}


def task_check():
    """Run flake8 static checks over the Python sources."""
    lint_action = (run_flake8, [src_py_dir])
    return {'actions': [lint_action]}


def task_json_schema_repo():
    """Bundle all YAML schemas into a single JSON Schema repository file.

    Collects every ``*.yaml`` file under ``schemas_json_dir`` and writes
    the combined repository to ``json_schema_repo_path``.
    """
    schema_paths = list(schemas_json_dir.rglob('*.yaml'))

    def write_repo():
        repository = json.SchemaRepository(*schema_paths)
        # indent=None keeps the generated file compact (single line).
        json.encode_file(repository.to_json(), json_schema_repo_path,
                         indent=None)

    return {'actions': [write_repo],
            'file_dep': schema_paths,
            'targets': [json_schema_repo_path]}