path: root/generate-canary-manifest.py
# -*- coding: utf-8 -*-
# Copyright (C) 2021 Igalia S.L.
#
# This file is part of Epiphany.
#
# Epiphany is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Epiphany is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Epiphany.  If not, see <http://www.gnu.org/licenses/>.
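
# Generates the org.gnome.Epiphany.Canary.json Flatpak manifest from its .in
# template: downloads the latest zipped WebKitGTK nightly from the Igalia build
# servers, points the webkitgtk module at that archive (pinned by its SHA-256
# checksum) and the epiphany module at the current source directory.
#
# Example invocation (a sketch; run from the directory containing the manifest
# template, with exactly one of the flags defined in main() below):
#
#   python3 generate-canary-manifest.py --release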

from html.parser import HTMLParser
import argparse
import hashlib
import json
import os
import re
import sys
import urllib.request

ZIP_FILE = "webkitgtk.zip"

# FIXME: Might be worth adding some JSON file listing builds on the servers.
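# The build servers expose a plain HTML directory listing; the parser below
# collects every <a> whose href starts with "release" or "debug" (hypothetical
# entry: "release_12345.zip"), and the last collected entry is later treated
# as the most recent build.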
class MyHTMLParser(HTMLParser):
    def __init__(self):
        super().__init__()
        # Hrefs collected in the order they appear in the directory listing.
        self.builds = []

    def handle_starttag(self, tag, attrs):
        if tag != "a":
            return
        for (name, value) in attrs:
            if name == "href" and (value.startswith("release") or value.startswith("debug")):
                self.builds.append(value)

def download_zipped_build(build_type):
    """Download the latest zipped build of the given type ("release" or "debug").

    Returns a (zip filename, sha256 checksum) tuple, or ("", "") when no build
    could be found in the server's listing.
    """
    url = f"https://webkitgtk-{build_type}.igalia.com/built-products/"
    with urllib.request.urlopen(url) as page_fd:
        parser = MyHTMLParser()
        parser.feed(page_fd.read().decode("utf-8"))
        try:
            latest = parser.builds[-1]
        except IndexError:
            print(f"No build found in {url}")
            return ("", "")

    print(f"Downloading build {latest} from {url}")
    # No need to pre-open ZIP_FILE: urlretrieve() below creates and writes it.

    def update(blocks, bs, size):
        # Render a 50-character progress bar; skip servers that do not report
        # a Content-Length (size <= 0) and clamp rounding overshoot.
        if size <= 0:
            return
        done = min(50, int(50 * blocks * bs / size))
        sys.stdout.write('\r[{}{}]'.format('█' * done, '.' * (50 - done)))
        sys.stdout.flush()

    urllib.request.urlretrieve(f"{url}{latest}", ZIP_FILE, update)
    # Finish the progress-bar line, then hash the archive so the manifest can pin it.
    print()
    h = hashlib.new('sha256')
    with open(ZIP_FILE, "rb") as f:
        h.update(f.read())

    checksum = h.hexdigest()
    return (ZIP_FILE, checksum)

def main(args):
    parser = argparse.ArgumentParser()
    type_group = parser.add_mutually_exclusive_group()
    type_group.add_argument("--debug", help="Download a debug build.",
                            dest='build_type', action="store_const", const="Debug")
    type_group.add_argument("--release", help="Download a release build.",
                            dest='build_type', action="store_const", const="Release")

    if len(args) == 0:
        parser.print_help(sys.stderr)
        return 1

    parsed, _ = parser.parse_known_args(args=args)
    if parsed.build_type is None:
        # Neither --debug nor --release was passed (only unrecognized arguments).
        parser.print_help(sys.stderr)
        return 1

    zip_filename, checksum = download_zipped_build(parsed.build_type.lower())
    if not zip_filename:
        return 2

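    # Fill in the manifest template: point the webkitgtk module at the downloaded
    # archive and the epiphany module at the local checkout.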
    manifest_path = "org.gnome.Epiphany.Canary.json"
    with open(f"{manifest_path}.in") as fd_in:
        json_input = json.load(fd_in)
        pwd = os.path.abspath(os.curdir)
        for module in json_input['modules']:
            if module['name'] == 'webkitgtk':
                path = os.path.join(pwd, zip_filename)
                module['sources'] = [{'type': 'archive', 'url': f'file://{path}', 'sha256': checksum,
                                      'strip-components': 0}]
            elif module['name'] == 'epiphany':
                module['sources'] = [{'type': 'dir', 'path': pwd}]
        with open(manifest_path, 'w') as fd_out:
            json.dump(json_input, fd_out, indent=4)

    return 0

if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))