2018-02-01 12:42:07 +00:00
|
|
|
#! /usr/bin/env nix-shell
|
2019-04-25 11:50:33 +01:00
|
|
|
#! nix-shell -i python3 -p "python3.withPackages (ps: with ps; [ attrs ])"
|
2018-02-01 12:42:07 +00:00
|
|
|
#
|
2018-05-26 15:37:37 +01:00
|
|
|
# This script downloads Home Assistant's source tarball.
|
2019-04-25 11:50:33 +01:00
|
|
|
# Inside the homeassistant/components directory, each integration has an associated manifest.json,
|
|
|
|
# specifying required packages and other integrations it depends on:
|
2018-02-01 12:42:07 +00:00
|
|
|
#
|
2019-04-25 11:50:33 +01:00
|
|
|
# {
|
|
|
|
# "requirements": [ "package==1.2.3" ],
|
|
|
|
# "dependencies": [ "component" ]
|
|
|
|
# }
|
2018-02-01 12:42:07 +00:00
|
|
|
#
|
2019-04-25 11:50:33 +01:00
|
|
|
# By parsing the files, a dictionary mapping integrations to requirements and dependencies is created.
|
2018-05-26 15:37:37 +01:00
|
|
|
# For all of these requirements and the dependencies' requirements,
|
2019-04-25 11:50:33 +01:00
|
|
|
# nixpkgs' python3Packages are searched for appropriate names.
|
|
|
|
# Then, a Nix attribute set mapping integration name to dependencies is created.
|
2018-02-01 12:42:07 +00:00
|
|
|
|
2018-05-25 16:14:30 +01:00
|
|
|
from io import BytesIO
|
2018-02-01 12:42:07 +00:00
|
|
|
import json
|
2019-04-25 11:50:33 +01:00
|
|
|
import pathlib
|
|
|
|
import os
|
2018-02-01 12:42:07 +00:00
|
|
|
import re
|
2019-04-25 11:50:33 +01:00
|
|
|
import subprocess
|
|
|
|
import sys
|
|
|
|
import tempfile
|
|
|
|
import tarfile
|
|
|
|
from urllib.request import urlopen
|
2018-02-01 12:42:07 +00:00
|
|
|
|
2018-05-25 16:14:30 +01:00
|
|
|
# Dotted module prefix of Home Assistant integrations
# (e.g. "homeassistant.components.hue").
COMPONENT_PREFIX = 'homeassistant.components'
# Nixpkgs attribute set that is searched for matching python packages.
PKG_SET = 'python3Packages'

# If some requirements are matched by multiple python packages,
# the following can be used to choose one of them
# (maps "attribute to drop" -> "attribute to use instead").
PKG_PREFERENCES = {
    # Use python3Packages.youtube-dl-light instead of python3Packages.youtube-dl
    'youtube-dl': 'youtube-dl-light',
    'tensorflow-bin': 'tensorflow',
    'tensorflowWithoutCuda': 'tensorflow'
}
|
|
|
|
|
2018-02-01 12:42:07 +00:00
|
|
|
def get_version():
    """Return the packaged Home Assistant version.

    Reads the ``hassVersion`` attribute out of the ``default.nix`` that
    sits next to this script.

    Raises:
        ValueError: if no ``hassVersion`` attribute is found.
    """
    # os.path.join keeps the path relative when the script is invoked from
    # its own directory (os.path.dirname(sys.argv[0]) == ''); the previous
    # string concatenation produced the absolute path /default.nix there.
    nix_file = os.path.join(os.path.dirname(sys.argv[0]), 'default.nix')
    with open(nix_file) as f:
        # A version consists of digits, dots, and possibly a "b" (for beta)
        m = re.search('hassVersion = "([\\d\\.b]+)";', f.read())
    if m is None:
        # Fail with a clear message instead of AttributeError on m.group.
        raise ValueError('no hassVersion found in ' + nix_file)
    return m.group(1)
|
|
|
|
|
2018-05-25 16:14:30 +01:00
|
|
|
def parse_components(version='master'):
    """Download the Home Assistant source tarball for *version* and return
    a dict mapping integration domain -> manifest dict.

    The manifests are loaded with Home Assistant's own hassfest helper,
    which ships inside the tarball (see the header comment of this file
    for the manifest.json layout).
    """
    components = {}
    with tempfile.TemporaryDirectory() as tmp:
        # Fetch and unpack the GitHub archive into the temporary directory.
        with urlopen(f'https://github.com/home-assistant/home-assistant/archive/{version}.tar.gz') as response:
            tarfile.open(fileobj=BytesIO(response.read())).extractall(tmp)
        # Use part of a script from the Home Assistant codebase
        # (importable only after adding the unpacked tree to sys.path).
        sys.path.append(os.path.join(tmp, f'home-assistant-{version}'))
        from script.hassfest.model import Integration
        integrations = Integration.load_dir(pathlib.Path(
            os.path.join(tmp, f'home-assistant-{version}', 'homeassistant/components')
        ))
        for domain in sorted(integrations):
            integration = integrations[domain]
            components[domain] = integration.manifest
    return components
|
|
|
|
|
|
|
|
# Recursively get the requirements of a component and its dependencies
def get_reqs(components, component, processed=None):
    """Return the set of requirement strings for *component*, including
    those of every component it (transitively) depends on.

    Args:
        components: dict mapping component domain -> manifest dict, as
            produced by parse_components().
        component: domain name to start from.
        processed: set of already-visited domains; callers normally omit
            it.  It guards against infinite recursion when two components
            declare each other as dependencies.
    """
    if processed is None:
        processed = set()
    processed.add(component)
    requirements = set(components[component]['requirements'])
    for dependency in components[component]['dependencies']:
        # Skip components already visited so dependency cycles terminate.
        if dependency not in processed:
            requirements.update(get_reqs(components, dependency, processed))
    return requirements
|
|
|
|
|
|
|
|
# Store a JSON dump of Nixpkgs' python3Packages.
# os.path.join keeps the path relative when dirname(sys.argv[0]) is empty
# (script invoked from its own directory); the previous "+ '/../../..'"
# concatenation produced an absolute path in that case.
output = subprocess.check_output([
    'nix-env',
    '-f', os.path.join(os.path.dirname(sys.argv[0]), '..', '..', '..'),
    '-qa',
    '-A', PKG_SET,
    '--json',
])
packages = json.loads(output)
|
|
|
|
|
|
|
|
def name_to_attr_path(req):
    """Look up the python3Packages attribute path for requirement *req*.

    Returns the full attribute path (e.g. "python3Packages.mpd2") when
    exactly one candidate remains, or None when nothing matched.
    Asserts if the name is still ambiguous after applying PKG_PREFERENCES.
    """
    candidates = set()
    search_names = [req]
    # E.g. python-mpd2 is actually called python3.6-mpd2
    # instead of python-3.6-python-mpd2 inside Nixpkgs
    if req.startswith('python-') or req.startswith('python_'):
        search_names.append(req[len('python-'):])
    for search_name in search_names:
        # treat "-" and "_" equally
        normalized = re.sub('[-_]', '[-_]', search_name)
        pattern = re.compile('^python\\d\\.\\d-{}-\\d'.format(normalized), re.I)
        candidates.update(
            attr_path
            for attr_path, package in packages.items()
            if pattern.match(package['name'])
        )
    if len(candidates) > 1:
        # Ambiguous match: apply the configured preferences.
        for to_replace, replacement in PKG_PREFERENCES.items():
            dropped = PKG_SET + '.' + to_replace
            if dropped in candidates:
                candidates.discard(dropped)
                candidates.add(PKG_SET + '.' + replacement)
    # Let's hope there's only one derivation with a matching name
    assert len(candidates) <= 1, "{} matches more than one derivation: {}".format(req, candidates)
    return candidates.pop() if candidates else None
|
|
|
|
|
|
|
|
version = get_version()
print('Generating component-packages.nix for version {}'.format(version))
components = parse_components(version=version)
build_inputs = {}

# Resolve every (transitive) requirement of each component to a nixpkgs
# attribute name; unresolvable requirements are simply skipped.
for component in sorted(components.keys()):
    resolved = []
    for requirement in sorted(get_reqs(components, component)):
        # Some requirements are specified by url, e.g. https://example.org/foobar#xyz==1.0.0
        # Therefore, if there's a "#" in the line, only take the part after it
        requirement = requirement[requirement.find('#') + 1:]
        pkg_name = requirement.split('==')[0]
        attr_path = name_to_attr_path(pkg_name)
        if attr_path is None:
            continue
        # Add attribute path without "python3Packages." prefix
        resolved.append(attr_path[len(PKG_SET + '.'):])
    # Every component gets an entry, even when no requirement resolved
    # (components with no requirements map to an empty list).
    build_inputs[component] = resolved
|
|
|
|
|
|
|
|
# Emit the generated attribute set next to this script.
# os.path.join keeps the path relative when dirname(sys.argv[0]) is empty
# (script invoked from its own directory); the previous string
# concatenation produced the absolute path /component-packages.nix there.
with open(os.path.join(os.path.dirname(sys.argv[0]), 'component-packages.nix'), 'w') as f:
    f.write('# Generated by parse-requirements.py\n')
    f.write('# Do not edit!\n\n')
    f.write('{\n')
    f.write('  version = "{}";\n'.format(version))
    f.write('  components = {\n')
    for component, attr_paths in build_inputs.items():
        # One line per component: "<name>" = ps: with ps; [ pkg1 pkg2 ];
        f.write('    "{}" = ps: with ps; [ '.format(component))
        f.write(' '.join(attr_paths))
        f.write(' ];\n')
    f.write('  };\n')
    f.write('}\n')
|