import argparse
import contextlib
import json
import os
import platform
import time
import urllib.request

from identify import identify
import jsonschema
import ruamel.yaml

yaml = ruamel.yaml.YAML(typ="safe")


sysname = platform.system()
# on windows, try to get the appdata env var
# this *could* result in CACHE_DIR=None, which is fine, just skip caching in
# that case
if sysname == "Windows":
    CACHE_DIR = os.getenv("LOCALAPPDATA", os.getenv("APPDATA"))
# macOS -> app support dir
elif sysname == "Darwin":
    CACHE_DIR = os.path.expanduser("~/Library/Application Support")
# default for unknown platforms, namely linux behavior
# use XDG env var and default to ~/.cache/
else:
    CACHE_DIR = os.getenv("XDG_CACHE_HOME", os.path.expanduser("~/.cache/"))

if CACHE_DIR:
    CACHE_DIR = os.path.join(CACHE_DIR, "jsonschema_validate")
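
# For illustration, the cache directory typically resolves to something like
# (example paths, not guaranteed values):
#   Windows: %LOCALAPPDATA%\jsonschema_validate
#   macOS:   ~/Library/Application Support/jsonschema_validate
#   Linux:   $XDG_CACHE_HOME/jsonschema_validate or ~/.cache/jsonschema_validate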


@contextlib.contextmanager
def cached_open(file_url, filename):
    if not CACHE_DIR:
        with urllib.request.urlopen(file_url) as fp:
            yield fp
    else:
        try:
            os.makedirs(CACHE_DIR)
        except FileExistsError:
            pass

        if not filename:
            filename = file_url.split("/")[-1]
        dest = os.path.join(CACHE_DIR, filename)

        # connect, but don't read yet
        conn = urllib.request.urlopen(file_url)

        # decide whether to download: if a cached copy exists, compare its mtime
        # against the remote Last-Modified header (checked before reading the
        # body, for speed); if there is no cached copy, always download
        do_download = True
        if os.path.exists(dest):
            # get both timestamps as epoch times
            local_mtime = os.path.getmtime(dest)
            remote_mtime = time.mktime(
                time.strptime(conn.headers["last-modified"], "%a, %d %b %Y %H:%M:%S %Z")
            )
            do_download = local_mtime < remote_mtime
        if do_download:
            with open(dest, "wb") as fp:
                fp.write(conn.read())

        conn.close()

        with open(dest, "r") as fp:
            yield fp
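
# Usage sketch for cached_open (the URL here is hypothetical, shown for
# illustration only):
#
#     with cached_open("https://example.com/schema.json", None) as fp:
#         schema = json.load(fp)
#
# When CACHE_DIR is None (e.g. no appdata env vars on Windows), the response
# is streamed directly and nothing is written to disk.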


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--schemafile",
        required=True,
        help=(
            "REQUIRED. "
            "The path to a file containing the jsonschema to use or an "
            "HTTP(S) URI for the schema. If a remote file is used, "
            "it will be downloaded and cached locally based on mtime."
        ),
    )
    parser.add_argument(
        "--cache-filename",
        help=(
            "The name to use for caching a remote schema. "
            "Defaults to the last slash-delimited part of the URI."
        ),
    )
    parser.add_argument("instancefiles", nargs="+", help="JSON or YAML files to check.")
    args = parser.parse_args()

    if args.schemafile.startswith("https://") or args.schemafile.startswith("http://"):
        with cached_open(args.schemafile, args.cache_filename) as fp:
            schema = json.load(fp)
    else:
        with open(args.schemafile) as f:
            schema = json.load(f)

    for instancefile in args.instancefiles:
        tags = identify.tags_from_path(instancefile)
        if "yaml" in tags:
            loader = yaml.load
        elif "json" in tags:
            loader = json.load
        else:
            raise ValueError(
                f"cannot check {instancefile} as it is neither yaml nor json"
            )
        with open(instancefile) as f:
            doc = loader(f)

        jsonschema.validate(instance=doc, schema=schema)

    print("ok -- validation done")
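
# Note: nothing in this module calls main() -- presumably it is wired up as a
# console-script entry point when installed. If the file were run directly as
# a script, a standard guard (not present in the original) would be needed:
#
#     if __name__ == "__main__":
#         main()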