| Index: components/ntp_tiles/update_default_sites_resources.py
|
| diff --git a/components/ntp_tiles/update_default_sites_resources.py b/components/ntp_tiles/update_default_sites_resources.py
|
| new file mode 100755
|
| index 0000000000000000000000000000000000000000..fdda791e8bd0da2352b48089c9a1dddc727c4c85
|
| --- /dev/null
|
| +++ b/components/ntp_tiles/update_default_sites_resources.py
|
| @@ -0,0 +1,146 @@
|
| +#!/usr/bin/env python2
|
| +# Copyright 2017 The Chromium Authors. All rights reserved.
|
| +# Use of this source code is governed by a BSD-style license that can be
|
| +# found in the LICENSE file.
|
| +
|
| +from __future__ import absolute_import
|
| +from __future__ import division
|
| +from __future__ import print_function
|
| +
|
| +import argparse
|
| +import contextlib
|
| +import glob
|
| +import io
|
| +import json
|
| +import os
|
| +import sys
|
| +import urllib
|
| +import Image
|
| +
|
# NOTE(review): this string is not a real module docstring because it follows
# the imports; consider moving it above the import block.
"""
This script downloads the default popular sites and large icons associated
with them. If an icon is too large, it will get resized in the process.
"""

# Endpoint serving the default popular sites suggestions as JSON.
DEFAULT_POPULAR_SITES = ("https://www.gstatic.com/chrome/ntp/"
                         "suggested_sites_DEFAULT_5.json")

# JSON keys that every processed site entry must provide.
LARGE_ICON_KEY = "large_icon_url"
SITE_TITLE_KEY = "title"
# Maximal icon edge length in pixels; larger icons get downscaled.
MAXIMAL_SIZE = 144
# Glob pattern matching previously stored icons (used for cleanup).
# NOTE(review): matches only single-digit indices (icon0.png..icon9.png);
# with ten or more sites, older icons would not be cleaned up — verify the
# expected site count.
SITE_ICON_DELETE = "icon[0-9].png"
# printf-style pattern used to name newly stored icons.
SITE_ICON_FORMAT = "icon%d.png"
# Resources live in the "resources" directory next to this script.
NTP_TILES_RESOURCE_PATH = os.path.join(
    os.path.dirname(os.path.realpath(__file__)), "resources")
DEFAULT_POPULAR_SITES_PATH = os.path.join(NTP_TILES_RESOURCE_PATH,
                                          "default_popular_sites.json")
|
| +
|
def download_as_json(url):
  """Fetches the resource at |url| and parses the response body as JSON.

  Returns the parsed JSON value (a list of site dictionaries for the
  default endpoint).
  """
  print("Downloading popular sites... (" + url + ")")
  connection = urllib.urlopen(url=url)
  try:
    sites = json.load(connection)
  finally:
    connection.close()
  print("... done. (%d sites found)" % len(sites))
  return sites
|
| +
|
def download_as_image(url):
  """Fetches the resource at |url| and decodes the payload as an Image."""
  with contextlib.closing(urllib.urlopen(url=url)) as response:
    payload = response.read()
  # Wrap the raw bytes in a seekable buffer so the image library can parse
  # them.
  return Image.open(io.BytesIO(payload))
|
| +
|
def write_to_json(data, out_path, pretty_print):
  """Serializes |data| as JSON into the file at |out_path|.

  The output is minified unless |pretty_print| is set, in which case it is
  indented by four spaces and has alphabetically sorted keys.
  """
  key_separator = ": " if pretty_print else ":"
  with open(out_path, "w") as out_file:
    json.dump(data,
              out_file,
              sort_keys=bool(pretty_print),
              indent=4 if pretty_print else None,
              separators=(",", key_separator))
  print("JSON was written to " + out_path)
|
| +
|
def delete_old_icons():
  """Deletes all previously stored site icons within the resource path."""
  print("Deleting old icons..")
  for f in glob.glob(os.path.join(NTP_TILES_RESOURCE_PATH, SITE_ICON_DELETE)):
    # |f| is already a full path (the glob pattern is rooted in
    # NTP_TILES_RESOURCE_PATH); the previous single-argument
    # os.path.join(f) wrapper was a no-op and has been dropped.
    os.remove(f)
  print("... done.")
|
| +
|
def resize_if_too_large(image, max_size):
  """Downscales |image| in place if either dimension exceeds |max_size|.

  Image.thumbnail preserves the aspect ratio, so the result fits within a
  |max_size| x |max_size| box. Checking the larger dimension (instead of
  only the width, as before) also caps tall non-square icons; for the
  expected square icons the behavior is unchanged.
  """
  if max(image.size) > max_size:
    print("... and resizing image from %s to %s ..." %
          (image.size, (max_size, max_size)))
    image.thumbnail((max_size, max_size), Image.ANTIALIAS)
|
| +
|
| +
|
| +
|
def lacks_required_keys(site):
  """Returns True if |site| misses the title or the large icon URL.

  A site must provide at least these two keys so that its icon can be
  downloaded and labeled.
  """
  # Idiomatic "not in" instead of the former "not KEY in site".
  return SITE_TITLE_KEY not in site or LARGE_ICON_KEY not in site
|
| +
|
| +
|
def main():
  """Parses the command line, then fetches (or loads) the popular sites JSON
  and downloads, resizes and stores the referenced large icons."""
  parser = argparse.ArgumentParser(
      description="Downloads the latest popular sites and their icons. \n\n"
      "It is possible to customize the default like this:\n"
      "  1. python " + __file__ + " -o temp.json --no_icons "
      "--pretty_print\n"
      "  2. Adjust the downloaded temp.json\n"
      "  3. python " + __file__ + " -f temp.json -s 96\n\n"
      "The result would be a minified version of your customized JSON "
      "and all icons would be downloaded as you specified.\n The icons "
      "had a max size of 96x96.",
      formatter_class=argparse.RawTextHelpFormatter)
  # Derive the documented default from MAXIMAL_SIZE so the help text cannot
  # drift from the actual default value.
  parser.add_argument("-s", "--size", metavar="size_in_px", type=int,
                      default=MAXIMAL_SIZE,
                      help="size to scale too large icons down to; defaults "
                           "to %dpx" % MAXIMAL_SIZE)
  parser.add_argument("-u", "--url", type=str,
                      default=DEFAULT_POPULAR_SITES,
                      help="the endpoint to query for json of sites")
  parser.add_argument("--no_icons", action="store_true",
                      help="do not download icons")
  parser.add_argument("--no_resizing", action="store_true",
                      help="do not resize any icons")
  parser.add_argument("-f", "--in_file", metavar="path_to_json_file",
                      type=str,
                      help="skip download and load icons for a local json")
  # The help text below previously duplicated the one of --in_file.
  parser.add_argument("-o", "--out_path", metavar="path_to_out_file",
                      type=str, default=DEFAULT_POPULAR_SITES_PATH,
                      help="file to write the resulting JSON to; defaults "
                           "to the checked-in default popular sites file")
  parser.add_argument("-p", "--pretty_print", action="store_true",
                      help="pretty_print instead of minifying the JSON")
  args = parser.parse_args()

  # Obtain the sites either from a local file or from the configured
  # endpoint, then persist the (possibly minified) result.
  if args.in_file:
    with open(args.in_file) as f:
      popular_sites = json.load(f)
  else:
    popular_sites = download_as_json(args.url)
  write_to_json(popular_sites, args.out_path, args.pretty_print)

  if args.no_icons:
    return

  delete_old_icons()
  for i, site in enumerate(popular_sites):
    if lacks_required_keys(site):
      print("Could not download large image for site: %r" % site)
      continue
    print("Downloading icon for '%r'..." % site[SITE_TITLE_KEY])
    image = download_as_image(site[LARGE_ICON_KEY])
    if not args.no_resizing:
      resize_if_too_large(image, args.size)
    image_name = SITE_ICON_FORMAT % i
    # optimize=True lets the encoder pick the smallest PNG representation.
    image.save(os.path.join(NTP_TILES_RESOURCE_PATH, image_name), "PNG",
               optimize=True)
    print("... done. (Stored as " + image_name + ")")
|
| +
|
| +
|
# Entry point when invoked as a standalone script.
if __name__ == "__main__":
  main()
|
|
|