Unverified commit 17fa0fa2 authored by liyuqian, committed by GitHub

Run and collect benchmarks (#14556)

This will start to collect engine benchmarks in the flutter-cirrus
Datastore for all post-submit commits.

We're using this to test how https://github.com/liyuqian/metrics_center
works. Once it's stabilized, we should move metrics_center into
flutter/packages, and migrate the data to the flutter-infra Datastore.

Related issue: https://github.com/flutter/flutter/issues/37434
Parent d669f634
@@ -19,6 +19,9 @@ task:
FRAMEWORK_PATH: "/tmp/master_framework"
PATH: "$FLUTTER_ENGINE/third_party/dart/tools/sdks/dart-sdk/bin:$DEPOT_TOOLS:$PATH"
USE_ANDROID: "False"
# TODO(liyuqian): currently we're using the flutter-cirrus GCP project. Migrate
# to the flutter-infra project once the metrics_center service is stabilized.
BENCHMARK_GCP_CREDENTIALS: ENCRYPTED[da76d2b7b39894de70fae1fc9182c97cc41400adc93f0f1c49bc7442f15fb933da8d756ed88523810a9a77c34f51a693]
setup_script: |
git clone --depth 1 https://chromium.googlesource.com/chromium/tools/depot_tools.git $DEPOT_TOOLS
mkdir -p $ENGINE_PATH/src
@@ -29,6 +32,18 @@ task:
mv $CIRRUS_WORKING_DIR flutter
gclient sync
matrix:
- name: build_and_benchmark_linux_release
only_if: $CIRRUS_BRANCH == 'master' # Only run for post-submit commits.
compile_host_script: |
cd $ENGINE_PATH/src
./flutter/tools/gn --runtime-mode=release
ninja -C out/host_release
benchmark_host_script: |
cd $ENGINE_PATH/src/out/host_release/
./txt_benchmarks --benchmark_format=json > txt_benchmarks.json
cd $ENGINE_PATH/src/flutter/testing/benchmark
pub get
dart bin/parse_and_send.dart ../../../out/host_release/txt_benchmarks.json
- name: build_and_test_linux_unopt_debug
compile_host_script: |
cd $ENGINE_PATH/src
......
This is a Dart project that runs the engine benchmarks and sends the metrics to
the cloud for storage and analysis.
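A minimal usage sketch (not part of this commit), assuming the Google Benchmark
JSON has already been generated and the BENCHMARK_GCP_CREDENTIALS environment
variable holds the service account JSON, mirroring the Cirrus
benchmark_host_script above:

  pub get
  dart bin/parse_and_send.dart ../../../out/host_release/txt_benchmarks.json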
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:convert';
import 'dart:io';
import 'package:git/git.dart';
import 'package:metrics_center/flutter.dart';
import 'package:metrics_center/google_benchmark.dart';
Future<String> _getGitRevision() async {
final GitDir gitDir = await GitDir.fromExisting('../../');
// Somehow gitDir.currentBranch() doesn't work in Cirrus with "fatal: 'HEAD' -
// not a valid ref". Therefore, we use "git log" to get the revision manually.
final ProcessResult logResult =
await gitDir.runCommand(<String>['log', '--pretty=format:%H', '-n', '1']);
if (logResult.exitCode != 0) {
throw 'Unexpected exit code ${logResult.exitCode}';
}
return logResult.stdout.toString();
}
Future<List<FlutterEngineMetricPoint>> _parse(String jsonFileName) async {
final String gitRevision = await _getGitRevision();
final List<MetricPoint> rawPoints =
await GoogleBenchmarkParser.parse(jsonFileName);
final List<FlutterEngineMetricPoint> points = <FlutterEngineMetricPoint>[];
for (MetricPoint rawPoint in rawPoints) {
points.add(FlutterEngineMetricPoint(
rawPoint.tags[kNameKey],
rawPoint.value,
gitRevision,
moreTags: rawPoint.tags,
));
}
return points;
}
Future<void> main(List<String> args) async {
if (args.length != 1) {
throw 'Must have one argument: <benchmark_json_file>';
}
final List<FlutterEngineMetricPoint> points = await _parse(args[0]);
// The data will be sent to the Datastore of the GCP project specified through
// environment variable BENCHMARK_GCP_CREDENTIALS. The engine Cirrus job is
// currently configured to use the flutter-cirrus GCP project for testing.
// We'll eventually migrate to the flutter-infra project once the test is done.
final FlutterDestination destination =
await FlutterDestination.makeFromCredentialsJson(
jsonDecode(Platform.environment['BENCHMARK_GCP_CREDENTIALS']),
);
await destination.update(points);
}
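For running the script outside Cirrus, a hypothetical sketch of supplying the
credentials (the file path is only an example; it matches the one used by the
test below, while Cirrus instead injects the variable through the encrypted
BENCHMARK_GCP_CREDENTIALS setting in .cirrus.yml):

  # Hypothetical local run; secret/test_gcp_credentials.json is a service
  # account key for a test GCP project.
  export BENCHMARK_GCP_CREDENTIALS="$(cat secret/test_gcp_credentials.json)"
  dart bin/parse_and_send.dart example/txt_benchmarks.json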
{
"context": {
"date": "2019-12-17 15:14:14",
"num_cpus": 56,
"mhz_per_cpu": 2594,
"cpu_scaling_enabled": true,
"library_build_type": "release"
},
"benchmarks": [
{
"name": "BM_PaintRecordInit",
"iterations": 6749079,
"real_time": 101,
"cpu_time": 101,
"time_unit": "ns"
},
{
"name": "BM_ParagraphShortLayout",
"iterations": 151761,
"real_time": 4460,
"cpu_time": 4460,
"time_unit": "ns"
},
{
"name": "BM_ParagraphStylesBigO_BigO",
"cpu_coefficient": 6548,
"real_coefficient": 6548,
"big_o": "N",
"time_unit": "ns"
}
]
}
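For reference, a file in this format is what the Google Benchmark binaries emit
when run with the JSON reporter, as in the Cirrus step above:

  ./txt_benchmarks --benchmark_format=json > txt_benchmarks.json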
name: flutter_engine_benchmark
dependencies:
git: any
metrics_center:
# TODO(liyuqian): once metrics_center is properly reviewed, add it to
# flutter/packages, publish on pub.dev, and use the published package here.
git: https://github.com/liyuqian/metrics_center.git
dev_dependencies:
test: any
pedantic: ^1.8.0
environment:
sdk: ">=2.2.2 <3.0.0"
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:io';
import 'package:test/test.dart';
void main() {
// In order to run this test, one should download a service account
// credentials json from a test GCP project, and put that json as
// `secret/test_gcp_credentials.json`. There's a `flutter-test` project for
// Flutter team members.
test('parse_and_send with example json does not crash.', () async {
final String testCred =
File('secret/test_gcp_credentials.json').readAsStringSync();
Process.runSync('dart', <String>[
'bin/parse_and_send.dart',
'example/txt_benchmarks.json',
], environment: <String, String>{
'BENCHMARK_GCP_CREDENTIALS': testCred,
});
});
}
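A sketch of running this check locally, assuming secret/test_gcp_credentials.json
is in place as described in the comment above:

  pub get
  pub run test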