diff --git a/scripts/perf-test/codebuild.yaml b/scripts/perf-test/codebuild.yaml
index 01daf9ce93f..6e85e8eb281 100644
--- a/scripts/perf-test/codebuild.yaml
+++ b/scripts/perf-test/codebuild.yaml
@@ -32,9 +32,14 @@ Resources:
           Version: 2012-10-17
           Statement:
             - Action:
+                - "s3:GetObject"
                 - "s3:PutObject"
              Effect: Allow
              Resource: !Join ["/", [!Sub "arn:aws:s3:::${S3Bucket}", "*"]]
+            - Action:
+                - "s3:ListBucket"
+              Effect: Allow
+              Resource: !Sub "arn:aws:s3:::${S3Bucket}"
            - Action:
                - "logs:CreateLogGroup"
                - "logs:CreateLogStream"
@@ -65,16 +70,31 @@ Resources:
                - echo "deb http://ppa.launchpad.net/ubuntu-toolchain-r/test/ubuntu trusty main" > /etc/apt/sources.list.d/toolchain.list
                - apt-key adv --keyserver keyserver.ubuntu.com --recv-keys BA9EF27F
                - apt-get update -y
-               - apt-get install -y libwww-perl g++-5 flex bison git openjdk-7-jdk
+               - apt-get install -y libwww-perl g++-5 flex bison git ccache
                - update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-5 1
                - update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-5 1
+               - |
+                 cd /root/
+                 aws s3 cp \
+                   s3://${S3Bucket}/release-build-cache/ccache.zip \
+                   ccache.zip && unzip ccache.zip || true
+                 cd $CODEBUILD_SRC_DIR
            build:
              commands:
                - echo ${Repository} > COMMIT_INFO
                - git rev-parse --short HEAD >> COMMIT_INFO
                - git log HEAD^..HEAD >> COMMIT_INFO
                - make -C src minisat2-download glucose-download cadical-download
-               - make -C src -j8
+               - export CCACHE_NOHASHDIR=1 ; make -C src CXX="ccache g++" -j8
+               - ccache -s
+           post_build:
+             commands:
+               - |
+                 cd /root/
+                 zip -r ccache.zip .ccache
+                 aws s3 cp ccache.zip \
+                   s3://${S3Bucket}/release-build-cache/ccache.zip
+                 cd $CODEBUILD_SRC_DIR
          artifacts:
            files:
              - src/cbmc/cbmc
@@ -107,16 +127,31 @@ Resources:
                - echo "deb http://ppa.launchpad.net/ubuntu-toolchain-r/test/ubuntu trusty main" > /etc/apt/sources.list.d/toolchain.list
                - apt-key adv --keyserver keyserver.ubuntu.com --recv-keys BA9EF27F
                - apt-get update -y
-               - apt-get install -y libwww-perl g++-5 flex bison git openjdk-7-jdk
+               - apt-get install -y libwww-perl g++-5 flex bison git ccache
                - update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-5 1
                - update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-5 1
+               - |
+                 cd /root/
+                 aws s3 cp \
+                   s3://${S3Bucket}/profiling-build-cache/ccache.zip \
+                   ccache.zip && unzip ccache.zip || true
+                 cd $CODEBUILD_SRC_DIR
            build:
              commands:
                - echo ${Repository} > COMMIT_INFO
                - git rev-parse --short HEAD >> COMMIT_INFO
                - git log HEAD^..HEAD >> COMMIT_INFO
                - make -C src minisat2-download glucose-download cadical-download
-               - make -C src -j8 CXXFLAGS="-O2 -pg -g -finline-limit=4" LINKFLAGS="-pg"
+               - export CCACHE_NOHASHDIR=1 ; make -C src CXX="ccache g++" -j8 CXXFLAGS="-O2 -pg -g -finline-limit=4" LINKFLAGS="-pg"
+               - ccache -s
+           post_build:
+             commands:
+               - |
+                 cd /root/
+                 zip -r ccache.zip .ccache
+                 aws s3 cp ccache.zip \
+                   s3://${S3Bucket}/profiling-build-cache/ccache.zip
+                 cd $CODEBUILD_SRC_DIR
          artifacts:
            files:
              - src/cbmc/cbmc
diff --git a/scripts/perf-test/ebs.yaml b/scripts/perf-test/ebs.yaml
index ce3a96f4792..9c6e3c9eec6 100644
--- a/scripts/perf-test/ebs.yaml
+++ b/scripts/perf-test/ebs.yaml
@@ -28,7 +28,7 @@ Resources:
            apt-get -y update
            apt-get install git
            cd /mnt
-           git clone --depth 1 --branch svcomp18 \
+           git clone --depth 1 --branch svcomp19 \
              https://github.com/sosy-lab/sv-benchmarks.git
            git clone --depth 1 \
              https://github.com/sosy-lab/benchexec.git
diff --git a/scripts/perf-test/ec2-terminate.sh b/scripts/perf-test/ec2-terminate.sh
new file mode 100644
index 00000000000..d4a9f455cf8
--- /dev/null
+++ b/scripts/perf-test/ec2-terminate.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+### BEGIN INIT INFO
+# Provides: ec2-terminate
+# Required-Start: $network $syslog
+# Required-Stop:
+# Default-Start:
+# Default-Stop: 0 1 6
+# Short-Description: ec2-terminate
+# Description: send termination email
+### END INIT INFO
+#
+
+case "$1" in
+  start|status)
+    exit 0
+    ;;
+  stop)
+    # run the below
+    ;;
+  *)
+    exit 1
+    ;;
+esac
+
+# send instance-terminated message
+# http://rogueleaderr.com/post/48795010760/how-to-notifyemail-yourself-when-an-ec2-instance/amp
+ut=$(cat /proc/uptime | cut -f1 -d" ")
+aws --region us-east-1 sns publish \
+  --topic-arn #SNSTOPIC# \
+  --message "instance terminating after $ut s at #MAXPRICE# USD/h"
+sleep 3 # make sure the message has time to send
+aws s3 cp /var/log/cloud-init-output.log \
+  s3://#S3BUCKET#/#PERFTESTID#/$HOSTNAME.cloud-init-output.log
+
+exit 0
diff --git a/scripts/perf-test/ec2.yaml b/scripts/perf-test/ec2.yaml
index 0f113d4ce3d..9de33649f69 100644
--- a/scripts/perf-test/ec2.yaml
+++ b/scripts/perf-test/ec2.yaml
@@ -41,6 +41,9 @@ Parameters:
   WitnessCheck:
     Type: String
 
+  CompareTo:
+    Type: String
+
 Conditions:
   UseSpot: !Not [!Equals [!Ref MaxPrice, ""]]
 
@@ -68,6 +71,10 @@ Resources:
              - "s3:GetObject"
            Effect: Allow
            Resource: !Join ["/", [!Sub "arn:aws:s3:::${S3Bucket}", "*"]]
+         - Action:
+             - "s3:ListBucket"
+           Effect: Allow
+           Resource: !Sub "arn:aws:s3:::${S3Bucket}"
          - Action:
              - "sns:Publish"
            Effect: Allow
@@ -145,313 +152,32 @@ Resources:
            #!/bin/bash
            set -x -e
 
-           # wait to make sure volume is available
-           sleep 10
-           e2fsck -f -y /dev/xvdf
-           resize2fs /dev/xvdf
-           mount /dev/xvdf /mnt
-
-           # install packages
+           # install awscli
            apt-get -y update
-           apt-get install -y git time wget binutils awscli make jq
-           apt-get install -y zip unzip
-           apt-get install -y gcc libc6-dev-i386
-           apt-get install -y ant python3-tempita python
-
-           # cgroup set up for benchexec
-           chmod o+wt '/sys/fs/cgroup/cpuset/'
-           chmod o+wt '/sys/fs/cgroup/cpu,cpuacct/user.slice'
-           chmod o+wt '/sys/fs/cgroup/memory/user.slice'
-           chmod o+wt '/sys/fs/cgroup/freezer/'
+           apt-get install -y awscli
 
            # AWS Sig-v4 access
            aws configure set s3.signature_version s3v4
 
-           # send instance-terminated message
-           # http://rogueleaderr.com/post/48795010760/how-to-notifyemail-yourself-when-an-ec2-instance/amp
-           cat >/etc/init.d/ec2-terminate <<"EOF"
-           #!/bin/bash
-           ### BEGIN INIT INFO
-           # Provides: ec2-terminate
-           # Required-Start: $network $syslog
-           # Required-Stop:
-           # Default-Start:
-           # Default-Stop: 0 1 6
-           # Short-Description: ec2-terminate
-           # Description: send termination email
-           ### END INIT INFO
-           #
-
-           case "$1" in
-             start|status)
-               exit 0
-               ;;
-             stop)
-               # run the below
-               ;;
-             *)
-               exit 1
-               ;;
-           esac
-
-           ut=$(cat /proc/uptime | cut -f1 -d" ")
-           aws --region us-east-1 sns publish \
-             --topic-arn ${SnsTopic} \
-             --message "instance terminating after $ut s at ${MaxPrice} USD/h"
-           sleep 3 # make sure the message has time to send
-           aws s3 cp /var/log/cloud-init-output.log \
-             s3://${S3Bucket}/${PerfTestId}/$HOSTNAME.cloud-init-output.log
-
-           exit 0
-           EOF
+           aws s3 cp s3://${S3Bucket}/ec2-terminate.sh /etc/init.d/ec2-terminate
+           sed -i 's/#SNSTOPIC#/${SnsTopic}/g' /etc/init.d/ec2-terminate
+           sed -i 's/#MAXPRICE#/${MaxPrice}/g' /etc/init.d/ec2-terminate
+           sed -i 's/#S3BUCKET#/${S3Bucket}/g' /etc/init.d/ec2-terminate
+           sed -i 's/#PERFTESTID#/${PerfTestId}/g' /etc/init.d/ec2-terminate
            chmod a+x /etc/init.d/ec2-terminate
            update-rc.d ec2-terminate defaults
            systemctl start ec2-terminate
 
-           # update benchexec
-           cd /mnt/benchexec
-           git pull
-
-           # prepare for tool packaging
-           cd /mnt
-           cd cprover-sv-comp
-           git pull
-           mkdir -p src/cbmc src/goto-cc
-           touch LICENSE
-           cd ..
-           mkdir -p run
-           cd run
-           wget -O cbmc.xml https://raw.githubusercontent.com/sosy-lab/sv-comp/master/benchmark-defs/cbmc.xml
-           sed -i 's/witness.graphml/${!logfile_path_abs}${!inputfile_name}-witness.graphml/' cbmc.xml
-           cd ..
-           mkdir -p tmp
-           export TMPDIR=/mnt/tmp
-
-           if [ x${WitnessCheck} = xTrue ]
-           then
-             cd cpachecker
-             ant
-
-             cd ../run
-             for def in \
-               cpa-seq-validate-correctness-witnesses \
-               cpa-seq-validate-violation-witnesses \
-               fshell-witness2test-validate-violation-witnesses
-             do
-               wget -O $def.xml https://raw.githubusercontent.com/sosy-lab/sv-comp/master/benchmark-defs/$def.xml
-               sed -i 's#[\./]*/results-verified/LOGDIR/sv-comp18.\${!inputfile_name}.files/witness.graphml#witnesses/sv-comp18.${!inputfile_name}-witness.graphml#' $def.xml
-             done
-
-             ln -s ../cpachecker/scripts/cpa.sh cpa.sh
-             ln -s ../cpachecker/config/ config
-
-             cp ../fshell-w2t/* .
-           fi
-
-           # reduce the likelihood of multiple hosts processing the
-           # same message (in addition to SQS's message hiding)
-           sleep $(expr $RANDOM % 30)
-           retry=1
-
-           while true
-           do
-             sqs=$(aws --region ${AWS::Region} sqs receive-message \
-               --queue-url ${SqsUrl} | \
-               jq -r '.Messages[0].Body,.Messages[0].ReceiptHandle')
-
-             if [ -z "$sqs" ]
-             then
-               # no un-read messages in the input queue; let's look
-               # at -run
-               n_msgs=$(aws --region ${AWS::Region} sqs \
-                 get-queue-attributes \
-                 --queue-url ${SqsUrl}-run \
-                 --attribute-names \
-                 ApproximateNumberOfMessages | \
-                 jq -r '.Attributes.ApproximateNumberOfMessages')
-
-               if [ $retry -eq 1 ]
-               then
-                 retry=0
-                 sleep 30
-                 continue
-               elif [ -n "$n_msgs" ] && [ "$n_msgs" = "0" ]
-               then
-                 # shut down the infrastructure
-                 aws --region us-east-1 sns publish \
-                   --topic-arn ${SnsTopic} \
-                   --message "Trying to delete stacks in ${AWS::Region}"
-                 aws --region ${AWS::Region} cloudformation \
-                   delete-stack --stack-name \
-                   perf-test-sqs-${PerfTestId}
-                 aws --region ${AWS::Region} cloudformation \
-                   delete-stack --stack-name \
-                   perf-test-exec-${PerfTestId}
-                 halt
-               fi
-
-               # the queue is gone, or other host will be turning
-               # off the lights
-               halt
-             fi
-
-             retry=1
-             bm=$(echo $sqs | cut -f1 -d" ")
-             cfg=$(echo $bm | cut -f1 -d"-")
-             t=$(echo $bm | cut -f2- -d"-")
-             msg=$(echo $sqs | cut -f2- -d" ")
-
-             # mark $bm in-progress
-             aws --region ${AWS::Region} sqs send-message \
-               --queue-url ${SqsUrl}-run \
-               --message-body $bm-$(hostname)
-
-             # there is no guarantee of cross-queue action ordering
-             # sleep for a bit to reduce the likelihood of missing
-             # in-progress messages while the input queue is empty
-             sleep 3
-
-             # remove it from the input queue
-             aws --region ${AWS::Region} sqs delete-message \
-               --queue-url ${SqsUrl} \
-               --receipt-handle $msg
-
-             cd /mnt/cprover-sv-comp
-             rm -f src/cbmc/cbmc src/goto-cc/goto-cc
-             aws s3 cp s3://${S3Bucket}/${PerfTestId}/$cfg/cbmc \
-               src/cbmc/cbmc
-             aws s3 cp s3://${S3Bucket}/${PerfTestId}/$cfg/goto-cc \
-               src/goto-cc/goto-cc
-             chmod a+x src/cbmc/cbmc src/goto-cc/goto-cc
-             make CBMC=. cbmc.zip
-             cd ../run
-             unzip ../cprover-sv-comp/cbmc.zip
-             mv cbmc cbmc-zip
-             mv cbmc-zip/* .
-             rmdir cbmc-zip
-             rm ../cprover-sv-comp/cbmc.zip
-
-             date
-             echo "Task: $t"
-
-             # compute the number of possible executors
-             max_par=$(cat /proc/cpuinfo | grep ^processor | wc -l)
-             mem=$(free -g | grep ^Mem | awk '{print $2}')
-             if [ $cfg != "profiling" ]
-             then
-               mem=$(expr $mem / 15)
-             else
-               mem=$(expr $mem / 7)
-             fi
-             if [ $mem -lt $max_par ]
-             then
-               max_par=$mem
-             fi
-
-             if [ $cfg != "profiling" ]
-             then
-               ../benchexec/bin/benchexec cbmc.xml --no-container \
-                 --task $t -T 900s -M 15GB -o logs-$t/ \
-                 -N $max_par -c 1
-               if [ -d logs-$t/cbmc.*.logfiles ]
-               then
-                 cd logs-$t
-                 tar czf witnesses.tar.gz cbmc.*.logfiles
-                 rm -rf cbmc.*.logfiles
-                 cd ..
-
-                 if [ x${WitnessCheck} = xTrue ]
-                 then
-                   for wc in *-witnesses.xml
-                   do
-                     wcp=$(echo $wc | sed 's/-witnesses.xml$//')
-                     mkdir witnesses
-                     tar -C witnesses --strip-components=1 -xzf \
-                       logs-$t/witnesses.tar.gz
-                     ../benchexec/bin/benchexec --no-container \
-                       $wc --task $t -T 90s -M 15GB \
-                       -o $wcp-logs-$t/ -N $max_par -c 1
-                     rm -rf witnesses
-                   done
-                 fi
-               fi
-               start_date="$(echo ${PerfTestId} | cut -f1-3 -d-) $(echo ${PerfTestId} | cut -f4-6 -d- | sed 's/-/:/g')"
-               for l in *logs-$t/*.xml.bz2
-               do
-                 cd $(dirname $l)
-                 bunzip2 *.xml.bz2
-                 perl -p -i -e \
-                   "s/^(/dev/null 2>&1
-               then
-                 gprof --sum ./cbmc-binary cbmc*.gmon.out.*
-                 gprof ./cbmc-binary gmon.sum > sum.profile-$t
-                 rm -f gmon.sum
-                 gprof --sum ./goto-cc goto-cc*.gmon.out.*
-                 gprof ./goto-cc gmon.sum > sum.goto-cc-profile-$t
-                 rm -f gmon.sum gmon.out *.gmon.out.*
-                 aws s3 cp sum.profile-$t \
-                   s3://${S3Bucket}/${PerfTestId}/$cfg/sum.profile-$t
-                 aws s3 cp sum.goto-cc-profile-$t \
-                   s3://${S3Bucket}/${PerfTestId}/$cfg/sum.goto-cc-profile-$t
-               fi
-             fi
-             rm -rf logs-$t sum.profile-$t
-             date
-
-             # clear out the in-progress message
-             while true
-             do
-               sqs=$(aws --region ${AWS::Region} sqs \
-                 receive-message \
-                 --queue-url ${SqsUrl}-run \
-                 --visibility-timeout 10 | \
-                 jq -r '.Messages[0].Body,.Messages[0].ReceiptHandle')
-               bm2=$(echo $sqs | cut -f1 -d" ")
-               msg2=$(echo $sqs | cut -f2- -d" ")
-
-               if [ "$bm2" = "$bm-$(hostname)" ]
-               then
-                 aws --region ${AWS::Region} sqs delete-message \
-                   --queue-url ${SqsUrl}-run \
-                   --receipt-handle $msg2
-                 break
-               fi
-             done
-           done
+           aws s3 cp s3://${S3Bucket}/run-on-ec2.sh /root/run-on-ec2.sh
+           sed -i 's#:WITNESSCHECK:#${WitnessCheck}#g' /root/run-on-ec2.sh
+           sed -i 's#:AWSREGION:#${AWS::Region}#g' /root/run-on-ec2.sh
+           sed -i 's#:SNSTOPIC:#${SnsTopic}#g' /root/run-on-ec2.sh
+           sed -i 's#:SQSURL:#${SqsUrl}#g' /root/run-on-ec2.sh
+           sed -i 's#:S3BUCKET:#${S3Bucket}#g' /root/run-on-ec2.sh
+           sed -i 's#:PERFTESTID:#${PerfTestId}#g' /root/run-on-ec2.sh
+           sed -i 's#:INSTANCETYPE:#${InstanceType}#g' /root/run-on-ec2.sh
+           sed -i 's#:COMPARETO:#${CompareTo}#g' /root/run-on-ec2.sh
+           . /root/run-on-ec2.sh
 
   AutoScalingGroup:
     Type: "AWS::AutoScaling::AutoScalingGroup"
diff --git a/scripts/perf-test/perf_test.py b/scripts/perf-test/perf_test.py
index 1af45b85665..604edf73658 100755
--- a/scripts/perf-test/perf_test.py
+++ b/scripts/perf-test/perf_test.py
@@ -72,6 +72,8 @@ def parse_args():
                         help='Non-default CodeBuild template to use')
     parser.add_argument('-W', '--witness-check', action='store_true',
                         help='Run witness checks after benchmarking')
+    parser.add_argument('-C', '--compare-to', default=[], action='append',
+                        help='Include past results in tables [may repeat]')
 
     args = parser.parse_args()
 
@@ -193,6 +195,15 @@
         logger.info('us-east-1: SNS topic artifact_uploaded set up')
         prepare_s3(session, bucket_name, artifact_uploaded_arn)
 
+    s3 = session.client('s3', region_name='us-east-1')
+    s3.upload_file(
+        Bucket=bucket_name,
+        Key='ec2-terminate.sh',
+        Filename=same_dir('ec2-terminate.sh'))
+    s3.upload_file(
+        Bucket=bucket_name,
+        Key='run-on-ec2.sh',
+        Filename=same_dir('run-on-ec2.sh'))
     return instance_terminated_arn
 
 
@@ -266,26 +277,27 @@
     logger.info('global: Lowest spot price: {} ({}): {}'.format(
         min_region, min_az, min_price))
 
-    # http://aws-ubuntu.herokuapp.com/
-    # 20180306 - Ubuntu 16.04 LTS (xenial) - hvm:ebs-ssd
+    # https://cloud-images.ubuntu.com/locator/ec2/
+    # 20181124 - Ubuntu 18.04 LTS (bionic) - hvm:ebs-ssd
     AMI_ids = {
         "Mappings": {
             "RegionMap": {
-                "ap-northeast-1": { "64": "ami-0d74386b" },
-                "ap-northeast-2": { "64": "ami-a414b9ca" },
-                "ap-south-1": { "64": "ami-0189d76e" },
-                "ap-southeast-1": { "64": "ami-52d4802e" },
-                "ap-southeast-2": { "64": "ami-d38a4ab1" },
-                "ca-central-1": { "64": "ami-ae55d2ca" },
-                "eu-central-1": { "64": "ami-7c412f13" },
-                "eu-west-1": { "64": "ami-f90a4880" },
-                "eu-west-2": { "64": "ami-f4f21593" },
-                "eu-west-3": { "64": "ami-0e55e373" },
-                "sa-east-1": { "64": "ami-423d772e" },
-                "us-east-1": { "64": "ami-43a15f3e" },
-                "us-east-2": { "64": "ami-916f59f4" },
-                "us-west-1": { "64": "ami-925144f2" },
-                "us-west-2": { "64": "ami-4e79ed36" }
+                "ap-northeast-1": { "64": "ami-0fd02119f1653c976" },
+                "ap-northeast-2": { "64": "ami-096560874cb404a4d" },
+                "ap-northeast-3": { "64": "ami-064d6dc91cdb4daa8" },
+                "ap-south-1": { "64": "ami-01187fe59c07cd350" },
+                "ap-southeast-1": { "64": "ami-0efb24bbbf33a2fd7" },
+                "ap-southeast-2": { "64": "ami-03932cb7d3a1a69af" },
+                "ca-central-1": { "64": "ami-0388b9f812caf5c3f" },
+                "eu-central-1": { "64": "ami-080d06f90eb293a27" },
+                "eu-west-1": { "64": "ami-02790d1ebf3b5181d" },
+                "eu-west-2": { "64": "ami-06328f1e652dc7605" },
+                "eu-west-3": { "64": "ami-0697abcfa8916e673" },
+                "sa-east-1": { "64": "ami-04fb8967affdf73b6" },
+                "us-east-1": { "64": "ami-0d2505740b82f7948" },
+                "us-east-2": { "64": "ami-0cf8cc36b8c81c6de" },
+                "us-west-1": { "64": "ami-09c5eca75eed8245a" },
+                "us-west-2": { "64": "ami-0f05ad41860678734" }
             }
         }
     }
@@ -466,17 +478,18 @@ def seed_queue(session, region, queue, task_set):
     # set up the tasks
     logger = logging.getLogger('perf_test')
-    all_tasks = ['ConcurrencySafety-Main', 'DefinedBehavior-Arrays',
-                 'DefinedBehavior-TerminCrafted', 'MemSafety-Arrays',
+    all_tasks = ['ConcurrencySafety-Main',
+                 'MemSafety-Arrays',
                  'MemSafety-Heap', 'MemSafety-LinkedLists',
-                 'MemSafety-Other', 'MemSafety-TerminCrafted',
-                 'Overflows-BitVectors', 'Overflows-Other',
+                 'MemSafety-MemCleanup',
+                 'MemSafety-Other',
+                 'NoOverflows-BitVectors', 'NoOverflows-Other',
                  'ReachSafety-Arrays',
                 'ReachSafety-BitVectors', 'ReachSafety-ControlFlow',
                 'ReachSafety-ECA', 'ReachSafety-Floats',
                 'ReachSafety-Heap', 'ReachSafety-Loops',
                 'ReachSafety-ProductLines', 'ReachSafety-Recursive',
                 'ReachSafety-Sequentialized',
-                 'Systems_BusyBox_MemSafety', 'Systems_BusyBox_Overflows',
+                 'Systems_BusyBox_MemSafety', 'Systems_BusyBox_NoOverflows',
                  'Systems_DeviceDriversLinux64_ReachSafety',
                  'Termination-MainControlFlow', 'Termination-MainHeap',
                  'Termination-Other']
@@ -506,7 +519,7 @@ def seed_queue(session, region, queue, task_set):
 def run_perf_test(
         session, mode, region, az, ami, instance_type, sqs_arn, sqs_url,
         parallel, snapshot_id, instance_terminated_arn, bucket_name,
-        perf_test_id, price, ssh_key_name, witness_check):
+        perf_test_id, price, ssh_key_name, witness_check, compare_to):
     # create an EC2 instance and trigger benchmarking
     logger = logging.getLogger('perf_test')
 
@@ -578,6 +591,10 @@
             {
                 'ParameterKey': 'WitnessCheck',
                 'ParameterValue': str(witness_check)
+            },
+            {
+                'ParameterKey': 'CompareTo',
+                'ParameterValue': ':'.join(compare_to)
             }
         ],
         Capabilities=['CAPABILITY_NAMED_IAM'])
@@ -640,8 +657,8 @@ def main():
         KeyName=args.ssh_key_name, PublicKeyMaterial=pk)
 
     # build a unique id for this performance test run
-    perf_test_id = str(datetime.datetime.utcnow().isoformat(
-        sep='-', timespec='seconds')) + '-' + args.commit_id
+    perf_test_id = str(datetime.datetime.utcnow().strftime(
+        '%Y-%m-%d-%H:%M:%S')) + '-' + args.commit_id
     perf_test_id = re.sub('[:/_\.\^~ ]', '-', perf_test_id)
     logger.info('global: Preparing performance test ' + perf_test_id)
 
@@ -674,7 +691,7 @@
         session, args.mode, region, az, ami, args.instance_type,
         sqs_arn, sqs_url, args.parallel, snapshot_id,
         instance_terminated_arn, bucket_name, perf_test_id, price,
-        args.ssh_key_name, args.witness_check)
+        args.ssh_key_name, args.witness_check, args.compare_to)
 
     return 0
 
diff --git a/scripts/perf-test/run-on-ec2.sh b/scripts/perf-test/run-on-ec2.sh
new file mode 100644
index 00000000000..6b72fce3cd8
--- /dev/null
+++ b/scripts/perf-test/run-on-ec2.sh
@@ -0,0 +1,265 @@
+#!/bin/bash
+set -x -e
+
+# set up the additional volume
+sleep 10
+e2fsck -f -y /dev/xvdf
+resize2fs /dev/xvdf
+mount /dev/xvdf /mnt
+
+# install packages
+apt-get install -y git time wget binutils make jq
+apt-get install -y zip unzip
+apt-get install -y gcc libc6-dev-i386
+apt-get install -y ant python3-tempita python
+
+# cgroup set up for benchexec
+chmod o+wt '/sys/fs/cgroup/cpuset/'
+chmod o+wt '/sys/fs/cgroup/cpu,cpuacct/user.slice'
+chmod o+wt '/sys/fs/cgroup/memory/user.slice'
+chmod o+wt '/sys/fs/cgroup/freezer/'
+
+# update benchmarks
+cd /mnt/sv-benchmarks
+git pull
+
+# update benchexec
+cd /mnt/benchexec
+git pull
+
+# prepare for tool packaging
+cd /mnt
+cd cprover-sv-comp
+git pull
+mkdir -p src/cbmc src/goto-cc
+touch LICENSE
+cd ..
+mkdir -p run
+cd run
+wget -O cbmc.xml \
+  https://raw.githubusercontent.com/sosy-lab/sv-comp/master/benchmark-defs/cbmc.xml
+sed -i 's/witness.graphml/${logfile_path_abs}${inputfile_name}-witness.graphml/' cbmc.xml
+cd ..
+mkdir -p tmp
+export TMPDIR=/mnt/tmp
+
+if [ x:WITNESSCHECK: = xTrue ]
+then
+  cd cpachecker
+  git pull
+  ant
+
+  cd ../run
+  for def in \
+    cpa-seq-validate-correctness-witnesses \
+    cpa-seq-validate-violation-witnesses \
+    fshell-witness2test-validate-violation-witnesses
+  do
+    wget -O $def.xml \
+      https://raw.githubusercontent.com/sosy-lab/sv-comp/master/benchmark-defs/$def.xml
+    sed -i 's#[\./]*/results-verified/LOGDIR/\${rundefinition_name}.\${inputfile_name}.files/witness.graphml#witnesses/${rundefinition_name}.${inputfile_name}-witness.graphml#' $def.xml
+  done
+
+  ln -s ../cpachecker/scripts/cpa.sh cpa.sh
+  ln -s ../cpachecker/config/ config
+
+  cd ../fshell-w2t
+  git pull
+  wget https://codeload.github.com/eliben/pycparser/zip/master -O pycparser-master.zip
+  unzip pycparser-master.zip
+  cd ../run
+  cp -a ../fshell-w2t/* .
+fi
+
+# reduce the likelihood of multiple hosts processing the
+# same message (in addition to SQS's message hiding)
+sleep $(expr $RANDOM % 30)
+retry=1
+
+while true
+do
+  sqs=$(aws --region :AWSREGION: sqs receive-message --queue-url :SQSURL: | \
+    jq -r '.Messages[0].Body,.Messages[0].ReceiptHandle')
+
+  if [ -z "$sqs" ]
+  then
+    # no un-read messages in the input queue; let's look
+    # at -run
+    n_msgs=$(aws --region :AWSREGION: sqs get-queue-attributes \
+      --queue-url :SQSURL:-run --attribute-names ApproximateNumberOfMessages | \
+      jq -r '.Attributes.ApproximateNumberOfMessages')
+
+    if [ $retry -eq 1 ]
+    then
+      retry=0
+      sleep 30
+      continue
+    elif [ -n "$n_msgs" ] && [ "$n_msgs" = "0" ]
+    then
+      # shut down the infrastructure
+      aws --region us-east-1 sns publish --topic-arn :SNSTOPIC: \
+        --message "Trying to delete stacks in :AWSREGION:"
+      aws --region :AWSREGION: cloudformation delete-stack --stack-name \
+        perf-test-sqs-:PERFTESTID:
+      aws --region :AWSREGION: cloudformation delete-stack --stack-name \
+        perf-test-exec-:PERFTESTID:
+      halt
+    fi
+
+    # the queue is gone, or other host will be turning
+    # off the lights
+    halt
+  fi
+
+  retry=1
+  bm=$(echo $sqs | cut -f1 -d" ")
+  cfg=$(echo $bm | cut -f1 -d"-")
+  t=$(echo $bm | cut -f2- -d"-")
+  msg=$(echo $sqs | cut -f2- -d" ")
+
+  # mark $bm in-progress
+  aws --region :AWSREGION: sqs send-message --queue-url :SQSURL:-run \
+    --message-body $bm-$(hostname)
+
+  # there is no guarantee of cross-queue action ordering
+  # sleep for a bit to reduce the likelihood of missing
+  # in-progress messages while the input queue is empty
+  sleep 3
+
+  # remove it from the input queue
+  aws --region :AWSREGION: sqs delete-message \
+    --queue-url :SQSURL: --receipt-handle $msg
+
+  cd /mnt/cprover-sv-comp
+  rm -f src/cbmc/cbmc src/goto-cc/goto-cc
+  aws s3 cp s3://:S3BUCKET:/:PERFTESTID:/$cfg/cbmc src/cbmc/cbmc
+  aws s3 cp s3://:S3BUCKET:/:PERFTESTID:/$cfg/goto-cc src/goto-cc/goto-cc
+  chmod a+x src/cbmc/cbmc src/goto-cc/goto-cc
+  make CBMC=. cbmc.zip
+  cd ../run
+  rm -f cbmc cbmc-binary goto-cc
+  unzip ../cprover-sv-comp/cbmc.zip
+  mv cbmc cbmc-zip
+  mv cbmc-zip/* .
+  rmdir cbmc-zip
+  rm ../cprover-sv-comp/cbmc.zip
+
+  date
+  echo "Task: $t"
+
+  # compute the number of possible executors
+  max_par=$(cat /proc/cpuinfo | grep ^processor | wc -l)
+  mem=$(free -g | grep ^Mem | awk '{print $2}')
+  if [ $cfg != "profiling" ]
+  then
+    mem=$(expr $mem / 15)
+  else
+    mem=$(expr $mem / 7)
+  fi
+  if [ $mem -lt $max_par ]
+  then
+    max_par=$mem
+  fi
+
+  if [ $cfg != "profiling" ]
+  then
+    ../benchexec/bin/benchexec cbmc.xml --no-container \
+      --task $t -T 900s -M 15GB -o logs-$t/ -N $max_par -c 1
+    if [ -d logs-$t/cbmc.*.logfiles ]
+    then
+      cd logs-$t
+      tar czf witnesses.tar.gz cbmc.*.logfiles
+      rm -rf cbmc.*.logfiles
+      cd ..
+
+      if [ x:WITNESSCHECK: = xTrue ]
+      then
+        for wc in *-witnesses.xml
+        do
+          wcp=$(echo $wc | sed 's/-witnesses.xml$//')
+          mkdir witnesses
+          tar -C witnesses --strip-components=1 -xzf logs-$t/witnesses.tar.gz
+          ../benchexec/bin/benchexec --no-container \
+            $wc --task $t -T 90s -M 15GB -o $wcp-logs-$t/ -N $max_par -c 1
+          rm -rf witnesses
+        done
+      fi
+    fi
+    start_date="$(echo :PERFTESTID: | cut -f1-3 -d-)"
+    start_date="$start_date $(echo :PERFTESTID: | cut -f4-6 -d- | sed 's/-/:/g')"
+    for l in *logs-$t/*.xml.bz2
+    do
+      cd $(dirname $l)
+      bunzip2 *.xml.bz2
+      perl -p -i -e \
+        "s/^(/dev/null 2>&1
+    then
+      gprof --sum ./cbmc-binary cbmc*.gmon.out.*
+      gprof ./cbmc-binary gmon.sum > sum.profile-$t
+      rm -f gmon.sum
+      gprof --sum ./goto-cc goto-cc*.gmon.out.*
+      gprof ./goto-cc gmon.sum > sum.goto-cc-profile-$t
+      rm -f gmon.sum gmon.out
+      rm -f cbmc*.gmon.out.*
+      rm -f goto-cc*.gmon.out.*
+      aws s3 cp sum.profile-$t s3://:S3BUCKET:/:PERFTESTID:/$cfg/sum.profile-$t
+      aws s3 cp sum.goto-cc-profile-$t \
+        s3://:S3BUCKET:/:PERFTESTID:/$cfg/sum.goto-cc-profile-$t
+    fi
+  fi
+  rm -rf logs-$t sum.profile-$t
+  date
+
+  # clear out the in-progress message
+  while true
+  do
+    sqs=$(aws --region :AWSREGION: sqs receive-message --queue-url :SQSURL:-run \
+      --visibility-timeout 10 | jq -r '.Messages[0].Body,.Messages[0].ReceiptHandle')
+    bm2=$(echo $sqs | cut -f1 -d" ")
+    msg2=$(echo $sqs | cut -f2- -d" ")
+
+    if [ "$bm2" = "$bm-$(hostname)" ]
+    then
+      aws --region :AWSREGION: sqs delete-message --queue-url :SQSURL:-run \
+        --receipt-handle $msg2
+      break
+    fi
+  done
+done
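Usage sketch for the new -C/--compare-to option (hypothetical invocation, not part of the patch; the remaining perf_test.py arguments are elided as "..." and the two perf-test IDs are invented examples). The flag may be repeated; perf_test.py joins the collected values with ':' into the CompareTo stack parameter, which the EC2 user data then substitutes for :COMPARETO: in run-on-ec2.sh.

# Hypothetical example: include two earlier perf-test runs in the result tables.
# "..." stands for the other perf_test.py options; IDs follow the
# <date>-<time>-<commit> form produced by perf_test_id after ':' is replaced by '-'.
./perf_test.py ... \
    --compare-to 2018-11-20-10-00-00-abc1234 \
    --compare-to 2018-11-27-12-30-00-def5678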