#!/usr/bin/env python3
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Atest tool functions."""
# pylint: disable=line-too-long
from __future__ import print_function
import json
import logging
import os
import pickle
import re
import shutil
import subprocess
import sys
import tempfile
import time
from pathlib import Path
from atest import atest_utils as au
from atest import constants
from atest.atest_enum import DetectType, ExitCode
from atest.metrics import metrics, metrics_utils
UPDATEDB = 'updatedb'
LOCATE = 'locate'
ACLOUD_DURATION = 'duration'
SEARCH_TOP = os.getenv(constants.ANDROID_BUILD_TOP, '')
# When adding a new index, remember to append its constant to the tuple below.
INDEXES = (constants.CC_CLASS_INDEX,
           constants.CLASS_INDEX,
           constants.LOCATE_CACHE,
           constants.PACKAGE_INDEX,
           constants.QCLASS_INDEX)
# The list was generated by command:
# find `gettop` -type d -wholename `gettop`/out -prune -o -type d -name '.*'
# -print | awk -F/ '{print $NF}' | sort -u
PRUNENAMES = ['.abc', '.appveyor', '.azure-pipelines',
              '.bazelci', '.build-id', '.buildkite', '.buildscript',
              '.cargo', '.ci', '.circleci', '.clusterfuzzlite', '.conan',
              '.devcontainer',
              '.dwz',
              '.externalToolBuilders',
              '.git', '.githooks', '.github', '.gitlab', '.gitlab-ci', '.google',
              '.hidden',
              '.idea', '.intermediates',
              '.jenkins',
              '.kokoro',
              '.libs_cffi_backend',
              '.more', '.mvn',
              '.prebuilt_info', '.private', '__pycache__',
              '.repo',
              '.settings', '.static', '.svn',
              '.test',
              '.travis',
              '.travis_scripts',
              '.tx',
              '.vscode']
PRUNEPATHS = ['prebuilts']
ACLOUD_REPORT_FILE_RE = re.compile(r'.*--report[_-]file(=|\s+)(?P<report_file>[\w/.]+)')


def _delete_indexes():
    """Delete all available index files."""
    for index in INDEXES:
        if os.path.isfile(index):
            os.remove(index)


def get_report_file(results_dir, acloud_args):
    """Get the acloud report file path.

    This method can parse either of these formats:
        --acloud-create '--report-file=/tmp/acloud.json'
        --acloud-create '--report-file /tmp/acloud.json'
    and returns '/tmp/acloud.json' as the report file. Otherwise it returns
    the default path (/tmp/atest_result/<hashed_dir>/acloud_status.json).

    Args:
        results_dir: string of the directory that stores atest results.
        acloud_args: string of acloud create arguments.

    Returns:
        A string path of the acloud report file.
    """
    match = ACLOUD_REPORT_FILE_RE.match(acloud_args)
    if match:
        return match.group('report_file')
    return os.path.join(results_dir, 'acloud_status.json')
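
# Illustrative usage (paths are made up): both accepted forms resolve to the
# embedded path; anything else falls back to the default.
#   get_report_file('/tmp/atest_result/x', '--report-file=/tmp/acloud.json')
#       -> '/tmp/acloud.json'
#   get_report_file('/tmp/atest_result/x', '--local-instance')
#       -> '/tmp/atest_result/x/acloud_status.json'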


def has_command(cmd):
    """Detect if the command is available in PATH.

    Args:
        cmd: A string of the tested command.

    Returns:
        True if found, False otherwise.
    """
    return bool(shutil.which(cmd))
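
# For example, has_command(UPDATEDB) returns True only when an updatedb
# binary (typically from the plocate or mlocate package) is on PATH.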


def run_updatedb(search_root=SEARCH_TOP, output_cache=constants.LOCATE_CACHE,
                 **kwargs):
    """Run updatedb and generate the cache in $ANDROID_HOST_OUT/indexes/plocate.db.

    Args:
        search_root: The path of the search root (-U).
        output_cache: The filename of the updatedb cache (-o).
        kwargs: (optional)
            prunepaths: A space-separated string of paths excluded from the
                        search (-e).
            prunenames: A space-separated string of dirnames that won't be
                        cached (-n).
    """
    prunenames = kwargs.pop('prunenames', ' '.join(PRUNENAMES))
    _prunepaths = [os.path.join(SEARCH_TOP, p) for p in PRUNEPATHS]
    _prunepaths.append(str(au.get_build_out_dir()))
    prunepaths = kwargs.pop('prunepaths', ' '.join(_prunepaths))
    if kwargs:
        raise TypeError('Unexpected **kwargs: %r' % kwargs)
    updatedb_cmd = [UPDATEDB, '-l0']
    updatedb_cmd.append('-U%s' % search_root)
    updatedb_cmd.append('-n%s' % prunenames)
    updatedb_cmd.append('-o%s' % output_cache)
    # (b/206866627) /etc/updatedb.conf excludes /mnt from scanning on Linux.
    # Use --prunepaths to override the default configuration.
    updatedb_cmd.append('--prunepaths')
    updatedb_cmd.append(prunepaths)
    # Support scanning bind mounts as well.
    updatedb_cmd.extend(['--prune-bind-mounts', 'no'])
    logging.debug('Running updatedb... ')
    try:
        full_env_vars = os.environ.copy()
        logging.debug('Executing: %s', updatedb_cmd)
        if not os.path.isdir(constants.INDEX_DIR):
            os.makedirs(constants.INDEX_DIR)
        subprocess.run(updatedb_cmd, env=full_env_vars, check=True)
    except (KeyboardInterrupt, SystemExit):
        logging.error('Process interrupted or failure.')
    # Delete the cache when plocate.db is locked or on other
    # CalledProcessError. (b/141588997)
    except subprocess.CalledProcessError as err:
        logging.error('Executing %s error.', UPDATEDB)
        metrics_utils.handle_exc_and_send_exit_event(
            constants.PLOCATEDB_LOCKED)
        if err.output:
            logging.error(err.output)
        os.remove(output_cache)
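
# For reference, the assembled command looks roughly like the following
# (paths and the prune lists are illustrative):
#   updatedb -l0 -U$ANDROID_BUILD_TOP -n'.git .repo ...' \
#       -o$ANDROID_HOST_OUT/indexes/plocate.db \
#       --prunepaths '$ANDROID_BUILD_TOP/prebuilts ...' --prune-bind-mounts no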


def _dump_index(dump_file, output, output_re, key, value):
    """Dump indexed data with pickle.

    Args:
        dump_file: A string of the absolute path of the index file.
        output: A string generated by locate and grep.
        output_re: A regex used for grouping patterns.
        key: A string for the dictionary key, e.g. classname, package,
             cc_class, etc.
        value: A set of paths.

    The data structure will be like:
    {
        'Foo': {'/path/to/Foo.java', '/path2/to/Foo.kt'},
        'Boo': {'/path3/to/Boo.java'}
    }
    """
    temp_file = tempfile.NamedTemporaryFile()
    _dict = {}
    with open(temp_file.name, 'wb') as cache_file:
        if isinstance(output, bytes):
            output = output.decode()
        for entry in output.splitlines():
            match = output_re.match(entry)
            if match:
                _dict.setdefault(match.group(key), set()).add(
                    match.group(value))
        try:
            pickle.dump(_dict, cache_file, protocol=2)
        except IOError:
            logging.error('Failed in dumping %s', dump_file)
    shutil.copy(temp_file.name, dump_file)
    temp_file.close()
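
# A minimal sketch of reading a dumped index back (for illustration only;
# atest's real consumers live elsewhere):
#   with open(constants.CLASS_INDEX, 'rb') as cache:
#       class_map = pickle.load(cache)
#   class_map.get('FooTestCase')  # -> {'/path/to/FooTestCase.java'}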


# pylint: disable=anomalous-backslash-in-string
def get_cc_result(locatedb=constants.LOCATE_CACHE, **kwargs):
    """Search all testable cc/cpp files and grep TEST(), TEST_F() or TEST_P().

    After searching cc/cpp files, index the corresponding data types in
    parallel.

    Args:
        locatedb: A string of the absolute path of plocate.db.
        kwargs: (optional)
            cc_class_index: A path string of the CC class index.
    """
    find_cc_cmd = (
        f"{LOCATE} -id{locatedb} --regex '/*.test.*\.(cc|cpp)$'"
        f"| xargs egrep -sH '{constants.CC_GREP_RE}' 2>/dev/null || true")
    logging.debug('Probing CC classes:\n %s', find_cc_cmd)
    result = subprocess.check_output(find_cc_cmd, shell=True)
    cc_class_index = kwargs.pop('cc_class_index', constants.CC_CLASS_INDEX)
    au.run_multi_proc(func=_index_cc_classes, args=[result, cc_class_index])


# pylint: disable=anomalous-backslash-in-string
def get_java_result(locatedb=constants.LOCATE_CACHE, **kwargs):
    """Search all testable java/kt files and grep the package.

    After searching java/kt files, index the corresponding data types in
    parallel.

    Args:
        locatedb: A string of the absolute path of plocate.db.
        kwargs: (optional)
            class_index: A path string of the Java class index.
            qclass_index: A path string of the qualified class index.
            package_index: A path string of the package index.
    """
    package_grep_re = r'^\s*package\s+[a-z][[:alnum:]]+[^{]'
    find_java_cmd = (
        f"{LOCATE} -id{locatedb} --regex '/*.test.*\.(java|kt)$' "
        # (b/204398677) suppress stderr when the indexing target terminated.
        f"| xargs egrep -sH '{package_grep_re}' 2>/dev/null || true")
    logging.debug('Probing Java classes:\n %s', find_java_cmd)
    result = subprocess.check_output(find_java_cmd, shell=True)
    class_index = kwargs.pop('class_index', constants.CLASS_INDEX)
    qclass_index = kwargs.pop('qclass_index', constants.QCLASS_INDEX)
    package_index = kwargs.pop('package_index', constants.PACKAGE_INDEX)
    au.run_multi_proc(func=_index_java_classes, args=[result, class_index])
    au.run_multi_proc(func=_index_qualified_classes,
                      args=[result, qclass_index])
    au.run_multi_proc(func=_index_packages, args=[result, package_index])
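
# Each line fed to the indexers by get_cc_result()/get_java_result() is
# plain `egrep -sH` output, i.e. '<file path>:<matched line>', e.g.
# '/src/foo/FooTest.java:package com.android.foo;' (illustrative path).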


def _index_cc_classes(output, index):
    """Index CC classes.

    The data structure is like:
    {
        'FooTestCase': {'/path1/to/the/FooTestCase.cpp',
                        '/path2/to/the/FooTestCase.cc'}
    }

    Args:
        output: A string object generated by get_cc_result().
        index: A string path of the index file.
    """
    logging.debug('indexing CC classes.')
    _dump_index(dump_file=index, output=output,
                output_re=constants.CC_OUTPUT_RE,
                key='test_name', value='file_path')


def _index_java_classes(output, index):
    """Index Java classes.

    The data structure is like:
    {
        'FooTestCase': {'/path1/to/the/FooTestCase.java',
                        '/path2/to/the/FooTestCase.kt'}
    }

    Args:
        output: A string object generated by get_java_result().
        index: A string path of the index file.
    """
    logging.debug('indexing Java classes.')
    _dump_index(dump_file=index, output=output,
                output_re=constants.CLASS_OUTPUT_RE,
                key='class', value='java_path')


def _index_packages(output, index):
    """Index Java packages.

    The data structure is like:
    {
        'a.b.c.d': {'/path1/to/a/b/c/d/',
                    '/path2/to/a/b/c/d/'}
    }

    Args:
        output: A string object generated by get_java_result().
        index: A string path of the index file.
    """
    logging.debug('indexing packages.')
    _dump_index(dump_file=index,
                output=output, output_re=constants.PACKAGE_OUTPUT_RE,
                key='package', value='java_dir')


def _index_qualified_classes(output, index):
    """Index fully qualified Java class names (FQCN).

    The data structure is like:
    {
        'a.b.c.d.FooTestCase': {'/path1/to/a/b/c/d/FooTestCase.java',
                                '/path2/to/a/b/c/d/FooTestCase.kt'}
    }

    Args:
        output: A string object generated by get_java_result().
        index: A string path of the index file.
    """
    logging.debug('indexing qualified classes.')
    temp_file = tempfile.NamedTemporaryFile()
    _dict = {}
    with open(temp_file.name, 'wb') as cache_file:
        if isinstance(output, bytes):
            output = output.decode()
        for entry in output.split('\n'):
            match = constants.QCLASS_OUTPUT_RE.match(entry)
            if match:
                fqcn = match.group('package') + '.' + match.group('class')
                _dict.setdefault(fqcn, set()).add(match.group('java_path'))
        try:
            pickle.dump(_dict, cache_file, protocol=2)
        except (KeyboardInterrupt, SystemExit):
            logging.error('Process interrupted or failure.')
        except IOError:
            logging.error('Failed in dumping %s', index)
    shutil.copy(temp_file.name, index)
    temp_file.close()


def index_targets(output_cache=constants.LOCATE_CACHE):
    """The entrypoint of indexing targets.

    Utilise the plocate database to index the reference types CLASS,
    CC_CLASS, PACKAGE and QUALIFIED_CLASS.

    (b/206886222) The checksum and file size of plocate.db may differ even
    when the source has not changed at all; therefore, indexing is skipped
    only when both conditions are fulfilled:
        - no `repo sync` was run before invoking atest.
        - the file numbers recorded in the current and the previous
          plocate.db are the same.

    Args:
        output_cache: A file path of the updatedb cache
                      (e.g. /path/to/plocate.db).
    """
    unavailable_cmds = [
        cmd for cmd in [UPDATEDB, LOCATE] if not has_command(cmd)]
    if unavailable_cmds:
        logging.debug('command %s is unavailable; skip indexing...',
                      ' '.join(unavailable_cmds))
        return
    # Get the number of indexed files.
    get_num_cmd = f'{LOCATE} -d{output_cache} --count /'
    ret, pre_number = subprocess.getstatusoutput(get_num_cmd)
    if ret != 0:
        logging.debug('Failed to run %s', get_num_cmd)
        pre_number = sys.maxsize
    run_updatedb(SEARCH_TOP, output_cache)
    checksum_file = os.path.join(constants.INDEX_DIR, 'repo_sync.md5')
    repo_syncd = not au.check_md5(checksum_file, missing_ok=False)
    if repo_syncd:
        repo_file = Path(SEARCH_TOP).joinpath(
            '.repo/.repo_fetchtimes.json')
        au.run_multi_proc(
            func=au.save_md5,
            args=[[repo_file], checksum_file])
    if not repo_syncd and pre_number == subprocess.getoutput(get_num_cmd):
        logging.debug('%s remains the same. Ignore indexing', output_cache)
        return
    logging.debug('Indexing targets... ')
    au.run_multi_proc(func=get_java_result, args=[output_cache])
    au.run_multi_proc(func=get_cc_result, args=[output_cache])
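
# Indexes can also be rebuilt manually by running this module directly (see
# the __main__ guard at the bottom), provided ANDROID_BUILD_TOP and
# ANDROID_HOST_OUT are set in a lunch'd environment.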


def acloud_create(report_file, args, no_metrics_notice=True):
    """Run acloud create with the specified args in the background.

    Args:
        report_file: A path string of the acloud report file.
        args: A string of arguments.
        no_metrics_notice: Boolean of whether to send data to metrics.
    """
    notice = constants.NO_METRICS_ARG if no_metrics_notice else ""
    match = ACLOUD_REPORT_FILE_RE.match(args)
    report_file_arg = f'--report-file={report_file}' if not match else ""
    # (b/161759557) Assume yes for acloud create to streamline the atest flow.
    acloud_cmd = ('acloud create -y {ACLOUD_ARGS} '
                  '{REPORT_FILE_ARG} '
                  '{METRICS_NOTICE} '
                  ).format(ACLOUD_ARGS=args,
                           REPORT_FILE_ARG=report_file_arg,
                           METRICS_NOTICE=notice)
    au.colorful_print("\nCreating AVD via acloud...", constants.CYAN)
    logging.debug('Executing: %s', acloud_cmd)
    start = time.time()
    proc = subprocess.Popen(acloud_cmd, shell=True)
    proc.communicate()
    acloud_duration = time.time() - start
    logging.info('"acloud create" process has completed.')
    # Insert the acloud create duration into the report file.
    result = au.load_json_safely(report_file)
    if result:
        result[ACLOUD_DURATION] = acloud_duration
        try:
            with open(report_file, 'w+') as _wfile:
                _wfile.write(json.dumps(result))
        except OSError as e:
            logging.error('Failed dumping duration to the report file: %s',
                          str(e))
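
# Illustrative call (the '--local-instance' argument is user-supplied, and
# this assumes constants.NO_METRICS_ARG is the usual '--no-metrics' flag):
#   acloud_create('/tmp/report.json', '--local-instance')
# runs a command like:
#   acloud create -y --local-instance --report-file=/tmp/report.json --no-metrics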


def acloud_create_validator(results_dir, args):
    """Check the lunch'd target before running 'acloud create'.

    Args:
        results_dir: A string of the results directory.
        args: An argparse.Namespace object.

    Returns:
        A tuple of (multiprocessing.Process, report_file path) when the
        target is valid; a tuple of (None, None) otherwise.
    """
    # Default to '' so a missing TARGET_PRODUCT doesn't crash re.match().
    target = os.getenv('TARGET_PRODUCT', '')
    if not re.match(r'^(aosp_|)cf_.*', target):
        au.colorful_print(
            f'{target} is not in the cuttlefish family; will not create any '
            'AVD.', constants.RED)
        return None, None
    if args.start_avd:
        args.acloud_create = []
    acloud_args = ' '.join(args.acloud_create)
    report_file = get_report_file(results_dir, acloud_args)
    acloud_proc = au.run_multi_proc(
        func=acloud_create,
        args=[report_file, acloud_args, args.no_metrics])
    return acloud_proc, report_file


def probe_acloud_status(report_file, find_build_duration):
    """Probe the 'acloud create' result status.

    If the report file exists and its status is 'SUCCESS', the creation was
    successful.

    Args:
        report_file: A path string of the acloud report file.
        find_build_duration: A float of seconds.

    Returns:
        0: success.
        8: acloud creation failure.
        9: invalid acloud create arguments.
    """
    # 1. The report was created, but the status may not be 'SUCCESS'.
    if Path(report_file).exists():
        if not au.load_json_safely(report_file):
            return ExitCode.AVD_CREATE_FAILURE
        with open(report_file, 'r') as rfile:
            result = json.load(rfile)
        if result.get('status') == 'SUCCESS':
            logging.info('acloud create succeeded!')
            # Always fetch the adb port of the first created AVD.
            adb_port = result.get('data').get('devices')[0].get('adb_port')
            is_remote_instance = result.get('command') == 'create_cf'
            adb_ip = '127.0.0.1' if is_remote_instance else '0.0.0.0'
            os.environ[constants.ANDROID_SERIAL] = f'{adb_ip}:{adb_port}'
            acloud_duration = get_acloud_duration(report_file)
            if find_build_duration - acloud_duration >= 0:
                # find+build took longer, so the acloud create time was saved.
                logging.debug('Saved acloud create time: %ss.',
                              acloud_duration)
                metrics.LocalDetectEvent(
                    detect_type=DetectType.ACLOUD_CREATE,
                    result=round(acloud_duration))
            else:
                # acloud create took longer, so the find+build time was saved.
                logging.debug('Saved find and build time: %ss.',
                              find_build_duration)
                metrics.LocalDetectEvent(
                    detect_type=DetectType.FIND_BUILD,
                    result=round(find_build_duration))
            return ExitCode.SUCCESS
        au.colorful_print(
            'acloud create failed. Please check\n{}\nfor details.'.format(
                report_file), constants.RED)
        return ExitCode.AVD_CREATE_FAILURE
    # 2. Failed to create because of invalid acloud arguments.
    logging.error('Invalid acloud arguments found!')
    return ExitCode.AVD_INVALID_ARGS
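
# For illustration, a report that this function treats as SUCCESS might look
# like the following (values are made up; only the keys read above matter):
# {
#     "command": "create_cf",
#     "status": "SUCCESS",
#     "data": {"devices": [{"adb_port": 6520}]},
#     "duration": 42.0
# }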


def get_acloud_duration(report_file):
    """Get the duration of 'acloud create' from a report file.

    Args:
        report_file: A path string of the acloud report file.

    Returns:
        A float of seconds that acloud create took.
    """
    content = au.load_json_safely(report_file)
    if not content:
        return 0
    return content.get(ACLOUD_DURATION, 0)


if __name__ == '__main__':
    if not os.getenv(constants.ANDROID_HOST_OUT, ''):
        sys.exit()
    index_targets()