From 11d4ac13e417438c59597bb5c6202975c8d7809c Mon Sep 17 00:00:00 2001 From: Aneesh Agrawal Date: Tue, 18 Apr 2017 23:09:12 -0400 Subject: [PATCH 1/6] Backup archive.py state module from 2016.3 The newer version of this module will prefer to use the unzip command instead of the python zipfile module for unzipping. This works around https://bugs.python.org/issue15795, namely a bug where the zipfile module does not respect permissions. This will fix our Android toolchain `archive.extracted` states. Note that this version of the module does not include https://github.com/saltstack/salt/pull/36552, so keep the `archive_user` argument to `archive.extracted` states. `archive.extracted` sets ownership only on the `if_missing` path, so ensure Salt sets ownership on all the extracted files by unsetting `if_missing`, which defaults to `name`. This now refers to the top level extracted directory, so all extracted files will be properly owned by `servo/servo`, which ensures we are able to run the `android` command as the `servo` user to download the platform and build tools in the `cmd.run` state. Because the `unzip` binary will be used to extract the zip files, require the `android-dependencies` `pkg.installed` state from `archive.extracted` states that extract zip archives to ensure that the `unzip` binary is available. --- _states/archive.py | 569 +++++++++++++++++++++++++++ servo-build-dependencies/android.sls | 4 +- 2 files changed, 571 insertions(+), 2 deletions(-) create mode 100644 _states/archive.py diff --git a/_states/archive.py b/_states/archive.py new file mode 100644 index 00000000..5719d432 --- /dev/null +++ b/_states/archive.py @@ -0,0 +1,569 @@ +# This module is backported from Salt 2016.3 +# (at commit f4f3ee69bac5b17a80d09e18a37a00f0a6a7ff9c). +# No modifications have been made with the exception of +# the addition of this header comment. 
+# +# The original copyright and licensing notice for the methods from the +# archive.py state module is reproduced below in the double-# comment block: +# +## Copyright 2014-2016 SaltStack Team +## +## Licensed under the Apache License, Version 2.0 (the "License"); +## you may not use this file except in compliance with the License. +## You may obtain a copy of the License at +## +## http://www.apache.org/licenses/LICENSE-2.0 +## +## Unless required by applicable law or agreed to in writing, software +## distributed under the License is distributed on an "AS IS" BASIS, +## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +## See the License for the specific language governing permissions and +## limitations under the License. + +# -*- coding: utf-8 -*- +''' +Extract an archive + +.. versionadded:: 2014.1.0 +''' + +# Import Python libs +from __future__ import absolute_import +import re +import os +import logging +import tarfile +from contextlib import closing + +# Import 3rd-party libs +import salt.ext.six as six +from salt.ext.six.moves import shlex_quote as _cmd_quote +from salt.ext.six.moves.urllib.parse import urlparse as _urlparse # pylint: disable=no-name-in-module + +# Import salt libs +import salt.utils +from salt.exceptions import CommandExecutionError +import salt.utils +# remove after archive_user deprecation. 
+from salt.utils import warn_until + +log = logging.getLogger(__name__) + +__virtualname__ = 'archive' + + +def __virtual__(): + ''' + Only load if the archive module is available in __salt__ + ''' + if 'archive.unzip' in __salt__ and 'archive.unrar' in __salt__: + return __virtualname__ + else: + return False + + +def _update_checksum(fname, target, checksum): + lines = [] + compare_string = '{0}:{1}'.format(target, checksum) + if os.path.exists(fname): + with salt.utils.fopen(fname, 'r') as f: + lines = f.readlines() + with salt.utils.fopen(fname, 'w') as f: + f.write('{0}:{1}\n'.format(target, checksum)) + for line in lines: + if line.startswith(target): + continue + f.write(line) + + +def _compare_checksum(fname, target, checksum): + if os.path.exists(fname): + compare_string = '{0}:{1}'.format(target, checksum) + with salt.utils.fopen(fname, 'r') as f: + while True: + current_line = f.readline() + if not current_line: + break + if current_line.endswith('\n'): + current_line = current_line[:-1] + if compare_string == current_line: + return True + return False + + +def _is_bsdtar(): + return 'bsdtar' in __salt__['cmd.run'](['tar', '--version'], + python_shell=False) + + +def _cleanup_destdir(name): + ''' + Attempt to remove the specified directory + ''' + try: + os.rmdir(name) + except OSError: + pass + + +def extracted(name, + source, + archive_format, + archive_user=None, + password=None, + user=None, + group=None, + tar_options=None, + source_hash=None, + if_missing=None, + keep=False, + trim_output=False, + skip_verify=False, + source_hash_update=None): + ''' + .. versionadded:: 2014.1.0 + + State that make sure an archive is extracted in a directory. + The downloaded archive is erased if successfully extracted. + The archive is downloaded only if necessary. + + .. note:: + + If ``if_missing`` is not defined, this state will check for ``name`` + instead. 
If ``name`` exists, it will assume the archive was previously + extracted successfully and will not extract it again. + + Example, tar with flag for lmza compression: + + .. code-block:: yaml + + graylog2-server: + archive.extracted: + - name: /opt/ + - source: https://github.com/downloads/Graylog2/graylog2-server/graylog2-server-0.9.6p1.tar.lzma + - source_hash: md5=499ae16dcae71eeb7c3a30c75ea7a1a6 + - tar_options: J + - archive_format: tar + - if_missing: /opt/graylog2-server-0.9.6p1/ + + Example, tar with flag for verbose output: + + .. code-block:: yaml + + graylog2-server: + archive.extracted: + - name: /opt/ + - source: https://github.com/downloads/Graylog2/graylog2-server/graylog2-server-0.9.6p1.tar.gz + - source_hash: md5=499ae16dcae71eeb7c3a30c75ea7a1a6 + - archive_format: tar + - tar_options: v + - user: root + - group: root + - if_missing: /opt/graylog2-server-0.9.6p1/ + + Example, tar with flag for lmza compression and update based if source_hash differs from what was + previously extracted: + + .. code-block:: yaml + + graylog2-server: + archive.extracted: + - name: /opt/ + - source: https://github.com/downloads/Graylog2/graylog2-server/graylog2-server-0.9.6p1.tar.lzma + - source_hash: md5=499ae16dcae71eeb7c3a30c75ea7a1a6 + - source_hash_update: true + - tar_options: J + - archive_format: tar + - if_missing: /opt/graylog2-server-0.9.6p1/ + + name + Location where archive should be extracted + + password + Password to use with password protected zip files. Currently only zip + files with passwords are supported. + + .. versionadded:: 2016.3.0 + + source + Archive source, same syntax as file.managed source argument. + + source_hash + Hash of source file, or file with list of hash-to-file mappings. + It uses the same syntax as the file.managed source_hash argument. + + source_hash_update + Set this to ``True`` if archive should be extracted if source_hash has + changed. This would extract regardless of the ``if_missing`` parameter. + + .. 
versionadded:: 2016.3.0 + + skip_verify:False + If ``True``, hash verification of remote file sources (``http://``, + ``https://``, ``ftp://``) will be skipped, and the ``source_hash`` + argument will be ignored. + + .. versionadded:: 2016.3.4 + + archive_format + ``tar``, ``zip`` or ``rar`` + + archive_user + The user to own each extracted file. + + .. deprecated:: 2014.7.2 + Replaced by ``user`` parameter + + user + The user to own each extracted file. + + .. versionadded:: 2015.8.0 + .. versionchanged:: 2016.3.0 + When used in combination with ``if_missing``, ownership will only + be enforced if ``if_missing`` is a directory. + + group + The group to own each extracted file. + + .. versionadded:: 2015.8.0 + .. versionchanged:: 2016.3.0 + When used in combination with ``if_missing``, ownership will only + be enforced if ``if_missing`` is a directory. + + if_missing + If specified, this path will be checked, and if it exists then the + archive will not be extracted. This can be helpful if the archive + extracts all files into a subfolder. This path can be either a + directory or a file, so this option can also be used to check for a + semaphore file and conditionally skip extraction. + + .. versionchanged:: 2016.3.0 + When used in combination with either ``user`` or ``group``, + ownership will only be enforced when ``if_missing`` is a directory. + + tar_options + If ``archive_format`` is set to ``tar``, this option can be used to + specify a string of additional arguments to pass to the tar command. If + ``archive_format`` is set to ``tar`` and this option is *not* used, + then the minion will attempt to use Python's native tarfile_ support to + extract it. Python's native tarfile_ support can only handle gzip and + bzip2 compression, however. + + .. versionchanged:: 2015.8.11,2016.3.2 + XZ-compressed archives no longer require ``J`` to manually be set + in the ``tar_options``, they are now detected automatically and + Salt will extract them using ``xz-utils``. 
This is a more + platform-independent solution, as not all tar implementations + support the ``J`` argument for extracting archives. + + .. note:: + Main operators like -x, --extract, --get, -c and -f/--file **should + not be used** here. + + Using this option means that the ``tar`` command will be used, + which is less platform-independent, so keep this in mind when using + this option; the options must be valid options for the ``tar`` + implementation on the minion's OS. + + .. _tarfile: https://docs.python.org/2/library/tarfile.html + + keep + Keep the archive in the minion's cache + + trim_output + The number of files we should output on success before the rest are + trimmed, if this is set to True then it will default to 100 + + .. versionadded:: 2016.3.0 + ''' + ret = {'name': name, 'result': None, 'changes': {}, 'comment': ''} + valid_archives = ('tar', 'rar', 'zip') + + if archive_format not in valid_archives: + ret['result'] = False + ret['comment'] = '{0} is not supported, valid formats are: {1}'.format( + archive_format, ','.join(valid_archives)) + return ret + + # remove this whole block after formal deprecation. + if archive_user is not None: + warn_until( + 'Carbon', + 'Passing \'archive_user\' is deprecated.' + 'Pass \'user\' instead.' 
+ ) + if user is None: + user = archive_user + + if not name.endswith('/'): + name += '/' + + if __opts__['test']: + source_match = source + else: + try: + source_match = __salt__['file.source_list'](source, + source_hash, + __env__)[0] + except CommandExecutionError as exc: + ret['result'] = False + ret['comment'] = exc.strerror + return ret + + urlparsed_source = _urlparse(source_match) + source_hash_name = urlparsed_source.path or urlparsed_source.netloc + + source_is_local = urlparsed_source.scheme in ('', 'file') + if source_is_local: + # Get rid of "file://" from start of source_match + source_match = urlparsed_source.path + if not os.path.isfile(source_match): + ret['comment'] = 'Source file \'{0}\' does not exist'.format(source_match) + return ret + + if if_missing is None: + if_missing = name + if source_hash and source_hash_update: + if urlparsed_source.scheme != '': + ret['result'] = False + ret['comment'] = ( + '\'source_hash_update\' is not yet implemented for a remote ' + 'source_hash' + ) + return ret + else: + try: + hash_type, hsum = source_hash.split('=') + except ValueError: + ret['result'] = False + ret['comment'] = 'Invalid source_hash format' + return ret + source_file = '{0}.{1}'.format(os.path.basename(source), hash_type) + hash_fname = os.path.join(__opts__['cachedir'], + 'files', + __env__, + source_file) + if _compare_checksum(hash_fname, name, hsum): + ret['result'] = True + ret['comment'] = 'Hash {0} has not changed'.format(hsum) + return ret + elif ( + __salt__['file.directory_exists'](if_missing) + or __salt__['file.file_exists'](if_missing) + ): + ret['result'] = True + ret['comment'] = '{0} already exists'.format(if_missing) + return ret + + log.debug('Input seem valid so far') + if source_is_local: + filename = source_match + else: + filename = os.path.join( + __opts__['cachedir'], + 'files', + __env__, + '{0}.{1}'.format(re.sub('[:/\\\\]', '_', if_missing), archive_format)) + + if not source_is_local and not 
os.path.isfile(filename): + if __opts__['test']: + ret['result'] = None + ret['comment'] = \ + '{0} {1} would be downloaded to cache'.format( + 'One of' if not isinstance(source_match, six.string_types) + else 'Archive', + source_match + ) + return ret + + log.debug('%s is not in cache, downloading it', source_match) + + file_result = __states__['file.managed'](filename, + source=source_match, + source_hash=source_hash, + makedirs=True, + skip_verify=skip_verify, + source_hash_name=source_hash_name) + log.debug('file.managed: {0}'.format(file_result)) + # get value of first key + try: + file_result = file_result[next(six.iterkeys(file_result))] + except AttributeError: + pass + + try: + if not file_result['result']: + log.debug('failed to download {0}'.format(source)) + return file_result + except TypeError: + if not file_result: + log.debug('failed to download {0}'.format(source)) + return file_result + else: + log.debug('Archive %s is already in cache', source) + + if __opts__['test']: + ret['result'] = None + ret['comment'] = '{0} {1} would be extracted to {2}'.format( + 'One of' if not isinstance(source_match, six.string_types) + else 'Archive', + source_match, + name + ) + return ret + + created_destdir = False + if __salt__['file.file_exists'](name.rstrip('/')): + ret['result'] = False + ret['comment'] = ('{0} exists and is not a directory' + .format(name.rstrip('/'))) + return ret + elif not __salt__['file.directory_exists'](name): + __salt__['file.makedirs'](name, user=archive_user) + created_destdir = True + + log.debug('Extracting {0} to {1}'.format(filename, name)) + if archive_format == 'zip': + if password is None and salt.utils.which('unzip'): + files = __salt__['archive.cmd_unzip'](filename, name, trim_output=trim_output) + else: + # https://bugs.python.org/issue15795 + if password is not None: + log.warning('Password supplied: using archive.unzip') + if not salt.utils.which('unzip'): + log.warning('Cannot find unzip command for archive.cmd_unzip:' + 
' using archive.unzip instead') + files = __salt__['archive.unzip'](filename, name, trim_output=trim_output, password=password) + elif archive_format == 'rar': + files = __salt__['archive.unrar'](filename, name, trim_output=trim_output) + else: + if tar_options is None: + try: + with closing(tarfile.open(filename, 'r')) as tar: + files = tar.getnames() + tar.extractall(name) + except tarfile.ReadError: + if salt.utils.which('xz'): + if __salt__['cmd.retcode'](['xz', '-l', filename], + python_shell=False, + ignore_retcode=True) == 0: + # XZ-compressed data + log.debug( + 'Tar file is XZ-compressed, attempting ' + 'decompression and extraction using xz-utils ' + 'and the tar command' + ) + # Must use python_shell=True here because not all tar + # implementations support the -J flag for decompressing + # XZ-compressed data. We need to dump the decompressed + # data to stdout and pipe it to tar for extraction. + cmd = 'xz --decompress --stdout {0} | tar xvf -' + results = __salt__['cmd.run_all']( + cmd.format(_cmd_quote(filename)), + cwd=name, + python_shell=True) + if results['retcode'] != 0: + if created_destdir: + _cleanup_destdir(name) + ret['result'] = False + ret['changes'] = results + return ret + if _is_bsdtar(): + files = results['stderr'] + else: + files = results['stdout'] + else: + # Failed to open tar archive and it is not + # XZ-compressed, gracefully fail the state + if created_destdir: + _cleanup_destdir(name) + ret['result'] = False + ret['comment'] = ( + 'Failed to read from tar archive using Python\'s ' + 'native tar file support. If archive is ' + 'compressed using something other than gzip or ' + 'bzip2, the \'tar_options\' parameter may be ' + 'required to pass the correct options to the tar ' + 'command in order to extract the archive.' + ) + return ret + else: + if created_destdir: + _cleanup_destdir(name) + ret['result'] = False + ret['comment'] = ( + 'Failed to read from tar archive. 
If it is ' + 'XZ-compressed, install xz-utils to attempt ' + 'extraction.' + ) + return ret + else: + try: + tar_opts = tar_options.split(' ') + except AttributeError: + tar_opts = str(tar_options).split(' ') + + tar_cmd = ['tar'] + tar_shortopts = 'x' + tar_longopts = [] + + for position, opt in enumerate(tar_opts): + if opt.startswith('-'): + tar_longopts.append(opt) + else: + if position > 0: + tar_longopts.append(opt) + else: + append_opt = opt + append_opt = append_opt.replace('x', '').replace('f', '') + tar_shortopts = tar_shortopts + append_opt + + tar_cmd.append(tar_shortopts) + tar_cmd.extend(tar_longopts) + tar_cmd.extend(['-f', filename]) + + results = __salt__['cmd.run_all'](tar_cmd, cwd=name, python_shell=False) + if results['retcode'] != 0: + ret['result'] = False + ret['changes'] = results + return ret + if _is_bsdtar(): + files = results['stderr'] + else: + files = results['stdout'] + if not files: + files = 'no tar output so far' + + # Recursively set user and group ownership of files after extraction. + # Note: We do this here because we might not have access to the cachedir. 
+ if user or group: + if os.path.isdir(if_missing): + recurse = [] + if user: + recurse.append('user') + if group: + recurse.append('group') + dir_result = __states__['file.directory'](if_missing, + user=user, + group=group, + recurse=recurse) + log.debug('file.directory: %s', dir_result) + elif os.path.isfile(if_missing): + log.debug('if_missing (%s) is a file, not enforcing user/group ' + 'permissions', if_missing) + + if len(files) > 0: + ret['result'] = True + ret['changes']['directories_created'] = [name] + ret['changes']['extracted_files'] = files + ret['comment'] = '{0} extracted to {1}'.format(source_match, name) + if not source_is_local and not keep: + os.unlink(filename) + if source_hash and source_hash_update: + _update_checksum(hash_fname, name, hash[1]) + + else: + __salt__['file.remove'](if_missing) + ret['result'] = False + ret['comment'] = 'Can\'t extract content of {0}'.format(source_match) + return ret diff --git a/servo-build-dependencies/android.sls b/servo-build-dependencies/android.sls index 8489de61..ad500e52 100644 --- a/servo-build-dependencies/android.sls +++ b/servo-build-dependencies/android.sls @@ -65,8 +65,8 @@ android-sdk: - archive_user: servo - user: servo - group: servo - - if_missing: {{ common.servo_home }}/android/sdk/{{ android.sdk.version }}/android-sdk-linux - require: + - pkg: android-dependencies - user: servo cmd.run: - name: | @@ -107,8 +107,8 @@ android-ndk: - archive_user: servo - user: servo - group: servo - - if_missing: {{ common.servo_home }}/android/ndk/{{ android.ndk.version }}/android-ndk-{{ android.ndk.version }} - require: + - pkg: android-dependencies - user: servo android-ndk-current: From e2a50d5fa26325da65b90ee1119f7efb8b4e58b5 Mon Sep 17 00:00:00 2001 From: Imanol Fernandez Date: Mon, 13 Mar 2017 22:54:47 +0100 Subject: [PATCH 2/6] Concurrently install Android r25.2.3 tools Install a new version of the Android tools along with the old one. 
Keep the old version as the current version for now to avoid breaking builds, but still make the new version available. This will also make future upgrades more smooth as well. Switch to the new URL scheme for SDK downloads, which also has switched to using zip files instead of .tar.gz files. Note that the new packaging format no longer has a top-level `android-sdk-linux` folder. Move the `build_tools` and `platform` attributes under `sdk`, as they are only used with the SDK-related states. Reinstate `if_missing` for the SDK `archive.extracted` state to ensure that the new archive will be extracted if the old one is present, so that the SDK has the correct directory layout. This also means ownership will only be updated for that file (the `android` binary), so add a `file.directory` state to handle fixing the ownership for the rest of the extracted files, and fixup the requisites as necessary. --- servo-build-dependencies/android.sls | 40 +++++++++++++++++++--------- servo-build-dependencies/map.jinja | 15 ++++++++--- 2 files changed, 38 insertions(+), 17 deletions(-) diff --git a/servo-build-dependencies/android.sls b/servo-build-dependencies/android.sls index ad500e52..c257dff9 100644 --- a/servo-build-dependencies/android.sls +++ b/servo-build-dependencies/android.sls @@ -54,25 +54,38 @@ android-dependencies: - require: - pkg: pip - -android-sdk: +{% for version, sdk in android.sdk.items() if version != 'current' %} +android-sdk-{{ version }}: archive.extracted: - - name: {{ common.servo_home }}/android/sdk/{{ android.sdk.version }} - - source: https://dl.google.com/android/android-sdk_{{ android.sdk.version }}-linux.tgz - - source_hash: sha512={{ android.sdk.sha512 }} - - archive_format: tar + - name: {{ common.servo_home }}/android/sdk/{{ version }} + - source: https://dl.google.com/android/repository/tools_{{ version }}-linux.zip + - source_hash: sha512={{ sdk.sha512 }} + - archive_format: zip # Workaround for https://github.com/saltstack/salt/pull/36552 - 
archive_user: servo - user: servo - group: servo + # Use this to ensure the SDK on disk has the correct directory layout, + # and use the subsequent `file.directory` state to fix ownership. + - if_missing: {{ common.servo_home }}/android/sdk/{{ version }}/tools/android + - require: + - user: servo + file.directory: + - name: {{ common.servo_home }}/android/sdk/{{ version }} + - user: servo + - group: servo + - recurse: + - user + - group - require: - pkg: android-dependencies - user: servo + - archive: android-sdk-{{ version }} cmd.run: - name: | expect -c ' set timeout -1; - spawn {{ common.servo_home }}/android/sdk/{{ android.sdk.version }}/android-sdk-linux/tools/android - update sdk --no-ui --all --filter platform-tools,android-{{ android.platform }},build-tools-{{ android.build_tools }}; + spawn {{ common.servo_home }}/android/sdk/{{ version }}/tools/android - update sdk --no-ui --all --filter platform-tools,android-{{ sdk.platform }},build-tools-{{ sdk.build_tools }}; expect { "Do you accept the license" { exp_send "y\r" ; exp_continue } eof @@ -80,21 +93,22 @@ android-sdk: ' - runas: servo - creates: - - {{ common.servo_home }}/android/sdk/{{ android.sdk.version }}/android-sdk-linux/platform-tools - - {{ common.servo_home }}/android/sdk/{{ android.sdk.version }}/android-sdk-linux/platforms/android-{{ android.platform }} - - {{ common.servo_home }}/android/sdk/{{ android.sdk.version }}/android-sdk-linux/build-tools/{{ android.build_tools }} + - {{ common.servo_home }}/android/sdk/{{ version }}/platform-tools + - {{ common.servo_home }}/android/sdk/{{ version }}/platforms/android-{{ sdk.platform }} + - {{ common.servo_home }}/android/sdk/{{ version }}/build-tools/{{ sdk.build_tools }} - require: - pkg: android-dependencies - - archive: android-sdk + - file: android-sdk-{{ version }} +{% endfor %} android-sdk-current: file.symlink: - name: {{ common.servo_home }}/android/sdk/current - - target: {{ common.servo_home }}/android/sdk/{{ android.sdk.version 
}}/android-sdk-linux + - target: {{ common.servo_home }}/android/sdk/{{ android.sdk.current }} - user: servo - group: servo - require: - - cmd: android-sdk + - cmd: android-sdk-{{ android.sdk.current }} android-ndk: diff --git a/servo-build-dependencies/map.jinja b/servo-build-dependencies/map.jinja index ceb2c02d..00869ea5 100644 --- a/servo-build-dependencies/map.jinja +++ b/servo-build-dependencies/map.jinja @@ -1,10 +1,17 @@ {% set android = { - 'build_tools': '23.0.3', - 'platform': '18', 'sdk': { - 'version': 'r24.4.1', - 'sha512': '96fb71d78a8c2833afeba6df617edcd6cc4e37ecd0c3bec38c39e78204ed3c2bd54b138a56086bf5ccd95e372e3c36e72c1550c13df8232ec19537da93049284' + 'current': 'r24.4.1', + 'r24.4.1': { + 'build_tools': '23.0.3', + 'platform': '18', + 'sha512': '778f43ab1d220bdc30087759862d2c0f3fa59e44122cc99666d57f37f69d46b14e4b8583e20234e23aaed4e291f5a21e607d08f69a2a9e73332889091054b8c9', + }, + 'r25.2.3': { + 'build_tools': '25.0.2', + 'platform': '25', + 'sha512': 'dfc30ee3e2714cf8008ab1f99757deded002d21b23a8d2ab07952e1afd1c93124ddec06660babf6a46c54e5e7e135c8c0cb4cc512378a8509da074dbf7e253d7' + } }, 'ndk': { 'version': 'r12b', From f1df6337a7241762576e1c9794a4c46539734505 Mon Sep 17 00:00:00 2001 From: Aneesh Agrawal Date: Wed, 19 Apr 2017 19:22:46 -0400 Subject: [PATCH 3/6] Add prereq to clean out old SDK when upgrading The SDK layout has changed due to the new packaging format, so simply extracting the new archive over the old one will leave extraneous files around. To fix this, if we expect to make changes in the `archive.extracted` state, then completely remove the existing version of the SDK. Also, unlink the `current` symlink before making changes to an SDK to avoid processes inadvertently using the SDK while it is being changed (the symlink will be restored once the SDK is done being updated.) 
--- servo-build-dependencies/android.sls | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/servo-build-dependencies/android.sls b/servo-build-dependencies/android.sls index c257dff9..d105720b 100644 --- a/servo-build-dependencies/android.sls +++ b/servo-build-dependencies/android.sls @@ -55,6 +55,12 @@ android-dependencies: - pkg: pip {% for version, sdk in android.sdk.items() if version != 'current' %} +android-sdk-{{ version }}-purge: + file.absent: + - name: {{ common.servo_home }}/android/sdk/{{ version }} + - prereq: + - archive: android-sdk-{{ version }} + android-sdk-{{ version }}: archive.extracted: - name: {{ common.servo_home }}/android/sdk/{{ version }} @@ -101,6 +107,14 @@ android-sdk-{{ version }}: - file: android-sdk-{{ version }} {% endfor %} +android-sdk-current-unlink: + file.absent: + - name: {{ common.servo_home }}/android/sdk/current + - prereq: + - archive: android-sdk-{{ android.sdk.current }} + - require_in: + - file: android-sdk-{{ android.sdk.current }}-purge + android-sdk-current: file.symlink: - name: {{ common.servo_home }}/android/sdk/current From ec3ed351fa1e375a4e1cdd5a4fc6c691337f7ae6 Mon Sep 17 00:00:00 2001 From: Aneesh Agrawal Date: Wed, 19 Apr 2017 13:50:11 -0400 Subject: [PATCH 4/6] Update expect script to propagate return codes Unfortunately, the `android` tool always returns a 0 exit code even in case of failure, so this does not yet catch failures. However, the new `sdkmanager` tool seems to return appropriate exit codes, so this will kick in in the future. This will prevent errors from passing silently, such as bad executable bits or incorrect file permissions. 
--- servo-build-dependencies/android.sls | 2 ++ 1 file changed, 2 insertions(+) diff --git a/servo-build-dependencies/android.sls b/servo-build-dependencies/android.sls index d105720b..9a749189 100644 --- a/servo-build-dependencies/android.sls +++ b/servo-build-dependencies/android.sls @@ -96,6 +96,8 @@ android-sdk-{{ version }}: "Do you accept the license" { exp_send "y\r" ; exp_continue } eof } + catch wait result + exit [lindex $result 3] ' - runas: servo - creates: From b9fbecd7bb8035585a495322230ee8d9c15255a4 Mon Sep 17 00:00:00 2001 From: Aneesh Agrawal Date: Wed, 19 Apr 2017 21:31:40 -0400 Subject: [PATCH 5/6] Add smoke test that Android SDK is installed properly The Android SDK installation states often have tricky changes due to upstream changes by Google, and additionally often fail silently in various different ways e.g. because the `android` tool always has a zero exit status. Additionally, some failures are not evident until Servo builds themselves start failing. Add a smoke test in saltfs to catch common problems early on, and automatically run it on Travis. 
--- .travis/dispatch.sh | 3 + requirements.txt | 1 + .../sls/servo-build-dependencies/__init__.py | 0 .../android/__init__.py | 0 .../servo-build-dependencies/android/sdk.py | 88 +++++++++++++++++++ 5 files changed, 92 insertions(+) create mode 100644 tests/sls/servo-build-dependencies/__init__.py create mode 100644 tests/sls/servo-build-dependencies/android/__init__.py create mode 100644 tests/sls/servo-build-dependencies/android/sdk.py diff --git a/.travis/dispatch.sh b/.travis/dispatch.sh index b6639243..8c232cf0 100755 --- a/.travis/dispatch.sh +++ b/.travis/dispatch.sh @@ -72,6 +72,9 @@ else if [[ "${SALT_NODE_ID}" == "servo-master1" ]]; then ./test.py sls.buildbot.master sls.homu sls.nginx fi + if [[ "${SALT_NODE_ID}" == "servo-linux-cross1" ]]; then + ./test.py sls.servo-build-dependencies.android + fi # Salt doesn't support timezone.system on OSX # See https://github.com/saltstack/salt/issues/31345 diff --git a/requirements.txt b/requirements.txt index 2fddb657..968fc6b6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,3 +2,4 @@ # (Travis-CI will auto-install them from this file) flake8 == 2.5.4 toml == 0.9.1 # Please ensure this is in sync with homu/init.sls +jinja2 == 2.9.6 # TODO: sync this with version used by Salt diff --git a/tests/sls/servo-build-dependencies/__init__.py b/tests/sls/servo-build-dependencies/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/sls/servo-build-dependencies/android/__init__.py b/tests/sls/servo-build-dependencies/android/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/sls/servo-build-dependencies/android/sdk.py b/tests/sls/servo-build-dependencies/android/sdk.py new file mode 100644 index 00000000..0622283d --- /dev/null +++ b/tests/sls/servo-build-dependencies/android/sdk.py @@ -0,0 +1,88 @@ +import os.path + +from jinja2 import Template + +from tests.util import Failure, Success, project_path + +BASE_DIR = '/home/servo/android/sdk' +CHECKS = { + 
'{}/android-sdk-linux': {'type': 'absent'}, + '{}/tools/android': {'type': 'file', 'executable': True}, + '{}/platform-tools': {'type': 'directory'}, + '{}/platforms/android-{platform}': {'type': 'directory'}, + '{}/build-tools/{build_tools}': {'type': 'directory'}, +} + + +def has_perms(perms, stat_info): + return perms == stat_info & perms + + +def run(): + with open(os.path.join( + project_path(), + 'servo-build-dependencies', + 'map.jinja' + )) as jinja_file: + template = Template(jinja_file.read()) + + sdk_vars = template.module.android['sdk'] + failures = [] + checks = {} + for version, sdk in sdk_vars.items(): + if version == 'current': + if sdk not in sdk_vars: + failures.append( + 'The current SDK is not pointed at any installed SDK' + ) + continue + checks[os.path.join(BASE_DIR, 'current')] = { + 'type': 'link', + 'target': os.path.join(BASE_DIR, sdk) + } + sdk = sdk_vars[sdk] + for path_template, spec in CHECKS.items(): + path = path_template.format(os.path.join(BASE_DIR, version), **sdk) + checks[path] = spec + + for path, spec in sorted(checks.items(), key=lambda kv: kv[0]): + exists = os.path.lexists(path) + if spec['type'] == 'absent': + if exists: + failures.append('{} should not exist'.format(path)) + continue + if not exists: + failures.append('{} does not exist but should'.format(path)) + continue + info = os.stat(path).st_mode + if spec['type'] == 'directory': + if not (os.path.isdir(path) and not os.path.islink(path)): + failures.append('{} should be a directory'.format(path)) + if not has_perms(0o700, info): + failures.append( + '{} should have at least perms 700'.format(path) + ) + elif spec['type'] == 'file': + if not (os.path.isfile(path) and not os.path.islink(path)): + failures.append('{} should be a file'.format(path)) + perms = 0o700 if spec['executable'] else 0o600 + if not has_perms(perms, info): + failures.append( + '{} should have at least perms {:o}'.format(path, perms) + ) + elif spec['type'] == 'link': + if not 
os.path.islink(path): + failures.append('{} should be a symlink'.format(path)) + if not os.path.realpath(path) == spec['target']: + failures.append( + '{} should be a link to {}'.format(path, spec['target']) + ) + + else: + failures.append('Unknown spec for path {}'.format(path)) + + if failures: + output = '\n'.join(('- {}'.format(f) for f in failures)) + return Failure('Android SDK(s) not installed properly:', output) + + return Success('Android SDK(s) are properly installed') From c82043c151494a189835c375c18659db8333bc85 Mon Sep 17 00:00:00 2001 From: Aneesh Agrawal Date: Thu, 20 Apr 2017 00:51:34 -0400 Subject: [PATCH 6/6] Invalidate entire Salt cache on Travis This will clear the entire cache in between runs when running with SALT_FROM_SCRATCH=false. Originally, only sls and other files in the state tree were cleared, but since external modules and other files also need to be cleared, just clear everything for robustness. Additionally, move this to after we checkout the new code, since we sync everything after invalidating the cache, and we want to sync the new configuration instead of the old one. --- .travis/dispatch.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.travis/dispatch.sh b/.travis/dispatch.sh index 8c232cf0..1da063f2 100755 --- a/.travis/dispatch.sh +++ b/.travis/dispatch.sh @@ -58,12 +58,13 @@ else run_salt 'old' set -o errexit + git checkout "${TRAVIS_COMMIT}" + travis_fold_start "salt.invalidate_cache" 'Invalidating the Salt cache' - rm -rf /var/cache/salt/minion/files/base/* + salt_call 'saltutil.clear_cache' salt_call 'saltutil.sync_all' travis_fold_end "salt.invalidate_cache" - git checkout "${TRAVIS_COMMIT}" run_salt 'upgrade' fi