#
# Copyright (C) 2013 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Verifying the integrity of a Chrome OS update payload.

This module is used internally by the main Payload class for verifying the
integrity of an update payload. The interface for invoking the checks is as
follows:

  checker = PayloadChecker(payload)
  checker.Run(...)
"""

from __future__ import absolute_import
from __future__ import print_function

import array
import base64
import collections
import hashlib
import itertools
import os
import subprocess

from six.moves import range

from update_payload import common
from update_payload import error
from update_payload import format_utils
from update_payload import histogram
from update_payload import update_metadata_pb2

#
# Constants.
#

_CHECK_MOVE_SAME_SRC_DST_BLOCK = 'move-same-src-dst-block'
_CHECK_PAYLOAD_SIG = 'payload-sig'
CHECKS_TO_DISABLE = (
    _CHECK_MOVE_SAME_SRC_DST_BLOCK,
    _CHECK_PAYLOAD_SIG,
)

_TYPE_FULL = 'full'
_TYPE_DELTA = 'delta'

_DEFAULT_BLOCK_SIZE = 4096

_DEFAULT_PUBKEY_BASE_NAME = 'update-payload-key.pub.pem'
_DEFAULT_PUBKEY_FILE_NAME = os.path.join(os.path.dirname(__file__),
                                         _DEFAULT_PUBKEY_BASE_NAME)

# Map of supported minor versions to the payload types allowed to use them.
_SUPPORTED_MINOR_VERSIONS = {
    0: (_TYPE_FULL,),
    2: (_TYPE_DELTA,),
    3: (_TYPE_DELTA,),
    4: (_TYPE_DELTA,),
    5: (_TYPE_DELTA,),
    6: (_TYPE_DELTA,),
}


#
# Helper functions.
#

def _IsPowerOfTwo(val):
  """Returns True iff val is a power of two."""
  return val > 0 and (val & (val - 1)) == 0


def _AddFormat(format_func, value):
  """Adds a custom formatted representation to ordinary string representation.

  Args:
    format_func: A value formatter.
    value: Value to be formatted and returned.

  Returns:
    A string 'x (y)' where x = str(value) and y = format_func(value).
  """
  ret = str(value)
  formatted_str = format_func(value)
  if formatted_str:
    ret += ' (%s)' % formatted_str
  return ret


def _AddHumanReadableSize(size):
  """Adds a human readable representation to a byte size value."""
  return _AddFormat(format_utils.BytesToHumanReadable, size)


#
# Payload report generator.
#

class _PayloadReport(object):
  """A payload report generator.

  A report is essentially a sequence of nodes, which represent data points. It
  is initialized to have a "global", untitled section. A node may be a
  sub-report itself.
  """

  # Report nodes: Field, sub-report, section.
  class Node(object):
    """A report node interface."""

    @staticmethod
    def _Indent(indent, line):
      """Indents a line by a given indentation amount.

      Args:
        indent: The indentation amount.
        line: The line content (string).

      Returns:
        The properly indented line (string).
134 """ 135 return '%*s%s' % (indent, '', line) 136 137 def GenerateLines(self, base_indent, sub_indent, curr_section): 138 """Generates the report lines for this node. 139 140 Args: 141 base_indent: Base indentation for each line. 142 sub_indent: Additional indentation for sub-nodes. 143 curr_section: The current report section object. 144 145 Returns: 146 A pair consisting of a list of properly indented report lines and a new 147 current section object. 148 """ 149 raise NotImplementedError 150 151 class FieldNode(Node): 152 """A field report node, representing a (name, value) pair.""" 153 154 def __init__(self, name, value, linebreak, indent): 155 super(_PayloadReport.FieldNode, self).__init__() 156 self.name = name 157 self.value = value 158 self.linebreak = linebreak 159 self.indent = indent 160 161 def GenerateLines(self, base_indent, sub_indent, curr_section): 162 """Generates a properly formatted 'name : value' entry.""" 163 report_output = '' 164 if self.name: 165 report_output += self.name.ljust(curr_section.max_field_name_len) + ' :' 166 value_lines = str(self.value).splitlines() 167 if self.linebreak and self.name: 168 report_output += '\n' + '\n'.join( 169 ['%*s%s' % (self.indent, '', line) for line in value_lines]) 170 else: 171 if self.name: 172 report_output += ' ' 173 report_output += '%*s' % (self.indent, '') 174 cont_line_indent = len(report_output) 175 indented_value_lines = [value_lines[0]] 176 indented_value_lines.extend(['%*s%s' % (cont_line_indent, '', line) 177 for line in value_lines[1:]]) 178 report_output += '\n'.join(indented_value_lines) 179 180 report_lines = [self._Indent(base_indent, line + '\n') 181 for line in report_output.split('\n')] 182 return report_lines, curr_section 183 184 class SubReportNode(Node): 185 """A sub-report node, representing a nested report.""" 186 187 def __init__(self, title, report): 188 super(_PayloadReport.SubReportNode, self).__init__() 189 self.title = title 190 self.report = report 191 192 def GenerateLines(self, base_indent, sub_indent, curr_section): 193 """Recurse with indentation.""" 194 report_lines = [self._Indent(base_indent, self.title + ' =>\n')] 195 report_lines.extend(self.report.GenerateLines(base_indent + sub_indent, 196 sub_indent)) 197 return report_lines, curr_section 198 199 class SectionNode(Node): 200 """A section header node.""" 201 202 def __init__(self, title=None): 203 super(_PayloadReport.SectionNode, self).__init__() 204 self.title = title 205 self.max_field_name_len = 0 206 207 def GenerateLines(self, base_indent, sub_indent, curr_section): 208 """Dump a title line, return self as the (new) current section.""" 209 report_lines = [] 210 if self.title: 211 report_lines.append(self._Indent(base_indent, 212 '=== %s ===\n' % self.title)) 213 return report_lines, self 214 215 def __init__(self): 216 self.report = [] 217 self.last_section = self.global_section = self.SectionNode() 218 self.is_finalized = False 219 220 def GenerateLines(self, base_indent, sub_indent): 221 """Generates the lines in the report, properly indented. 222 223 Args: 224 base_indent: The indentation used for root-level report lines. 225 sub_indent: The indentation offset used for sub-reports. 226 227 Returns: 228 A list of indented report lines. 
229 """ 230 report_lines = [] 231 curr_section = self.global_section 232 for node in self.report: 233 node_report_lines, curr_section = node.GenerateLines( 234 base_indent, sub_indent, curr_section) 235 report_lines.extend(node_report_lines) 236 237 return report_lines 238 239 def Dump(self, out_file, base_indent=0, sub_indent=2): 240 """Dumps the report to a file. 241 242 Args: 243 out_file: File object to output the content to. 244 base_indent: Base indentation for report lines. 245 sub_indent: Added indentation for sub-reports. 246 """ 247 report_lines = self.GenerateLines(base_indent, sub_indent) 248 if report_lines and not self.is_finalized: 249 report_lines.append('(incomplete report)\n') 250 251 for line in report_lines: 252 out_file.write(line) 253 254 def AddField(self, name, value, linebreak=False, indent=0): 255 """Adds a field/value pair to the payload report. 256 257 Args: 258 name: The field's name. 259 value: The field's value. 260 linebreak: Whether the value should be printed on a new line. 261 indent: Amount of extra indent for each line of the value. 262 """ 263 assert not self.is_finalized 264 if name and self.last_section.max_field_name_len < len(name): 265 self.last_section.max_field_name_len = len(name) 266 self.report.append(self.FieldNode(name, value, linebreak, indent)) 267 268 def AddSubReport(self, title): 269 """Adds and returns a sub-report with a title.""" 270 assert not self.is_finalized 271 sub_report = self.SubReportNode(title, type(self)()) 272 self.report.append(sub_report) 273 return sub_report.report 274 275 def AddSection(self, title): 276 """Adds a new section title.""" 277 assert not self.is_finalized 278 self.last_section = self.SectionNode(title) 279 self.report.append(self.last_section) 280 281 def Finalize(self): 282 """Seals the report, marking it as complete.""" 283 self.is_finalized = True 284 285 286# 287# Payload verification. 288# 289 290class PayloadChecker(object): 291 """Checking the integrity of an update payload. 292 293 This is a short-lived object whose purpose is to isolate the logic used for 294 verifying the integrity of an update payload. 295 """ 296 297 def __init__(self, payload, assert_type=None, block_size=0, 298 allow_unhashed=False, disabled_tests=()): 299 """Initialize the checker. 300 301 Args: 302 payload: The payload object to check. 303 assert_type: Assert that payload is either 'full' or 'delta' (optional). 304 block_size: Expected filesystem / payload block size (optional). 305 allow_unhashed: Allow operations with unhashed data blobs. 306 disabled_tests: Sequence of tests to disable. 307 """ 308 if not payload.is_init: 309 raise ValueError('Uninitialized update payload.') 310 311 # Set checker configuration. 312 self.payload = payload 313 self.block_size = block_size if block_size else _DEFAULT_BLOCK_SIZE 314 if not _IsPowerOfTwo(self.block_size): 315 raise error.PayloadError( 316 'Expected block (%d) size is not a power of two.' % self.block_size) 317 if assert_type not in (None, _TYPE_FULL, _TYPE_DELTA): 318 raise error.PayloadError('Invalid assert_type value (%r).' % 319 assert_type) 320 self.payload_type = assert_type 321 self.allow_unhashed = allow_unhashed 322 323 # Disable specific tests. 324 self.check_move_same_src_dst_block = ( 325 _CHECK_MOVE_SAME_SRC_DST_BLOCK not in disabled_tests) 326 self.check_payload_sig = _CHECK_PAYLOAD_SIG not in disabled_tests 327 328 # Reset state; these will be assigned when the manifest is checked. 
    self.sigs_offset = 0
    self.sigs_size = 0
    self.old_part_info = {}
    self.new_part_info = {}
    self.new_fs_sizes = collections.defaultdict(int)
    self.old_fs_sizes = collections.defaultdict(int)
    self.minor_version = None
    self.major_version = None

  @staticmethod
  def _CheckElem(msg, name, report, is_mandatory, is_submsg, convert=str,
                 msg_name=None, linebreak=False, indent=0):
    """Adds an element from a protobuf message to the payload report.

    Checks to see whether a message contains a given element, and if so adds
    the element value to the provided report. A missing mandatory element
    causes an exception to be raised.

    Args:
      msg: The message containing the element.
      name: The name of the element.
      report: A report object to add the element name/value to.
      is_mandatory: Whether or not this element must be present.
      is_submsg: Whether this element is itself a message.
      convert: A function for converting the element value for reporting.
      msg_name: The name of the message object (for error reporting).
      linebreak: Whether the value report should induce a line break.
      indent: Amount of indent used for reporting the value.

    Returns:
      A pair consisting of the element value and the generated sub-report for
      it (if the element is a sub-message, None otherwise). If the element is
      missing, returns (None, None).

    Raises:
      error.PayloadError if a mandatory element is missing.
    """
    element_result = collections.namedtuple('element_result', ['msg', 'report'])

    if not msg.HasField(name):
      if is_mandatory:
        raise error.PayloadError('%smissing mandatory %s %r.' %
                                 (msg_name + ' ' if msg_name else '',
                                  'sub-message' if is_submsg else 'field',
                                  name))
      return element_result(None, None)

    value = getattr(msg, name)
    if is_submsg:
      return element_result(value, report and report.AddSubReport(name))
    else:
      if report:
        report.AddField(name, convert(value), linebreak=linebreak,
                        indent=indent)
      return element_result(value, None)

  @staticmethod
  def _CheckRepeatedElemNotPresent(msg, field_name, msg_name):
    """Checks that a repeated element is not specified in the message.

    Args:
      msg: The message containing the element.
      field_name: The name of the element.
      msg_name: The name of the message object (for error reporting).

    Raises:
      error.PayloadError if the repeated element is present or non-empty.
    """
    if getattr(msg, field_name, None):
      raise error.PayloadError('%sfield %r not empty.' %
                               (msg_name + ' ' if msg_name else '', field_name))

  @staticmethod
  def _CheckElemNotPresent(msg, field_name, msg_name):
    """Checks that an element is not specified in the message.

    Args:
      msg: The message containing the element.
      field_name: The name of the element.
      msg_name: The name of the message object (for error reporting).

    Raises:
      error.PayloadError if the element is present.
    """
    if msg.HasField(field_name):
      raise error.PayloadError('%sfield %r exists.' %
                               (msg_name + ' ' if msg_name else '', field_name))

  @staticmethod
  def _CheckMandatoryField(msg, field_name, report, msg_name, convert=str,
                           linebreak=False, indent=0):
    """Adds a mandatory field; returns the first component from _CheckElem."""
    return PayloadChecker._CheckElem(msg, field_name, report, True, False,
                                     convert=convert, msg_name=msg_name,
                                     linebreak=linebreak, indent=indent)[0]

  @staticmethod
  def _CheckOptionalField(msg, field_name, report, convert=str,
                          linebreak=False, indent=0):
    """Adds an optional field; returns the first component from _CheckElem."""
    return PayloadChecker._CheckElem(msg, field_name, report, False, False,
                                     convert=convert, linebreak=linebreak,
                                     indent=indent)[0]

  @staticmethod
  def _CheckMandatorySubMsg(msg, submsg_name, report, msg_name):
    """Adds a mandatory sub-message; wrapper for _CheckElem."""
    # Pass msg_name by keyword; passing it positionally would bind it to the
    # convert parameter and drop it from any error message.
    return PayloadChecker._CheckElem(msg, submsg_name, report, True, True,
                                     msg_name=msg_name)

  @staticmethod
  def _CheckOptionalSubMsg(msg, submsg_name, report):
    """Adds an optional sub-message; wrapper for _CheckElem."""
    return PayloadChecker._CheckElem(msg, submsg_name, report, False, True)

  @staticmethod
  def _CheckPresentIff(val1, val2, name1, name2, obj_name):
    """Checks that val1 is None iff val2 is None.

    Args:
      val1: First value to be compared.
      val2: Second value to be compared.
      name1: Name of the object holding the first value.
      name2: Name of the object holding the second value.
      obj_name: Name of the object containing these values.

    Raises:
      error.PayloadError if the assertion does not hold.
    """
    if None in (val1, val2) and val1 is not val2:
      present, missing = (name1, name2) if val2 is None else (name2, name1)
      raise error.PayloadError('%r present without %r%s.' %
                               (present, missing,
                                ' in ' + obj_name if obj_name else ''))

  @staticmethod
  def _CheckPresentIffMany(vals, name, obj_name):
    """Checks that either all of the values are present or none of them is.

    Args:
      vals: The set of values to be compared.
      name: The name of the objects holding the corresponding value.
      obj_name: Name of the object containing these values.

    Raises:
      error.PayloadError if the assertion does not hold.
    """
    if any(vals) and not all(vals):
      raise error.PayloadError('%r is not present in all values%s.' %
                               (name, ' in ' + obj_name if obj_name else ''))

  @staticmethod
  def _Run(cmd, send_data=None):
    """Runs a subprocess and returns its output.

    Args:
      cmd: Sequence of command-line arguments for invoking the subprocess.
      send_data: Data to feed to the process via its stdin.

    Returns:
      A tuple containing the stdout and stderr output of the process.
    """
    run_process = subprocess.Popen(cmd, stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE)
    try:
      result = run_process.communicate(input=send_data)
    finally:
      exit_code = run_process.wait()

    if exit_code:
      raise RuntimeError('Subprocess %r failed with code %r.' %
                         (cmd, exit_code))

    return result

  @staticmethod
  def _CheckSha256Signature(sig_data, pubkey_file_name, actual_hash, sig_name):
    """Verifies an actual hash against a signed one.

    Args:
      sig_data: The raw signature data.
      pubkey_file_name: Public key used for verifying signature.
      actual_hash: The actual hash digest.
      sig_name: Signature name for error reporting.

    Raises:
      error.PayloadError if the signature could not be verified.
    """
    if len(sig_data) != 256:
      raise error.PayloadError(
          '%s: signature size (%d) not as expected (256).' %
          (sig_name, len(sig_data)))
    signed_data, _ = PayloadChecker._Run(
        ['openssl', 'rsautl', '-verify', '-pubin', '-inkey', pubkey_file_name],
        send_data=sig_data)

    if len(signed_data) != len(common.SIG_ASN1_HEADER) + 32:
      raise error.PayloadError('%s: unexpected signed data length (%d).' %
                               (sig_name, len(signed_data)))

    if not signed_data.startswith(common.SIG_ASN1_HEADER):
      raise error.PayloadError('%s: not containing standard ASN.1 prefix.' %
                               sig_name)

    signed_hash = signed_data[len(common.SIG_ASN1_HEADER):]
    if signed_hash != actual_hash:
      raise error.PayloadError(
          '%s: signed hash (%s) different from actual (%s).' %
          (sig_name, common.FormatSha256(signed_hash),
           common.FormatSha256(actual_hash)))

  @staticmethod
  def _CheckBlocksFitLength(length, num_blocks, block_size, length_name,
                            block_name=None):
    """Checks that a given length fits given block space.

    This ensures that the number of blocks allocated is appropriate for the
    length of the data residing in these blocks.

    Args:
      length: The actual length of the data.
      num_blocks: The number of blocks allocated for it.
      block_size: The size of each block in bytes.
      length_name: Name of length (used for error reporting).
      block_name: Name of block (used for error reporting).

    Raises:
      error.PayloadError if the aforementioned invariant is not satisfied.
    """
    # Check: length <= num_blocks * block_size.
    if length > num_blocks * block_size:
      raise error.PayloadError(
          '%s (%d) > num %sblocks (%d) * block_size (%d).' %
          (length_name, length, block_name or '', num_blocks, block_size))

    # Check: length > (num_blocks - 1) * block_size.
    if length <= (num_blocks - 1) * block_size:
      raise error.PayloadError(
          '%s (%d) <= (num %sblocks - 1 (%d)) * block_size (%d).' %
          (length_name, length, block_name or '', num_blocks - 1, block_size))

  def _CheckManifestMinorVersion(self, report):
    """Checks the payload manifest minor_version field.

    Args:
      report: The report object to add to.

    Raises:
      error.PayloadError if any of the checks fail.
    """
    self.minor_version = self._CheckOptionalField(self.payload.manifest,
                                                  'minor_version', report)
    if self.minor_version in _SUPPORTED_MINOR_VERSIONS:
      if self.payload_type not in _SUPPORTED_MINOR_VERSIONS[self.minor_version]:
        raise error.PayloadError(
            'Minor version %d not compatible with payload type %s.' %
            (self.minor_version, self.payload_type))
    elif self.minor_version is None:
      raise error.PayloadError('Minor version is not set.')
    else:
      raise error.PayloadError('Unsupported minor version: %d' %
                               self.minor_version)

  def _CheckManifest(self, report, part_sizes=None):
    """Checks the payload manifest.

    As a side effect, sets the checker's payload type, signature block
    offset/size and old/new per-partition filesystem sizes.

    Args:
      report: A report object to add to.
      part_sizes: Map of partition label to partition size in bytes.

    Raises:
      error.PayloadError if any of the checks fail.
605 """ 606 self.major_version = self.payload.header.version 607 608 part_sizes = part_sizes or collections.defaultdict(int) 609 manifest = self.payload.manifest 610 report.AddSection('manifest') 611 612 # Check: block_size must exist and match the expected value. 613 actual_block_size = self._CheckMandatoryField(manifest, 'block_size', 614 report, 'manifest') 615 if actual_block_size != self.block_size: 616 raise error.PayloadError('Block_size (%d) not as expected (%d).' % 617 (actual_block_size, self.block_size)) 618 619 # Check: signatures_offset <==> signatures_size. 620 self.sigs_offset = self._CheckOptionalField(manifest, 'signatures_offset', 621 report) 622 self.sigs_size = self._CheckOptionalField(manifest, 'signatures_size', 623 report) 624 self._CheckPresentIff(self.sigs_offset, self.sigs_size, 625 'signatures_offset', 'signatures_size', 'manifest') 626 627 for part in manifest.partitions: 628 name = part.partition_name 629 self.old_part_info[name] = self._CheckOptionalSubMsg( 630 part, 'old_partition_info', report) 631 self.new_part_info[name] = self._CheckMandatorySubMsg( 632 part, 'new_partition_info', report, 'manifest.partitions') 633 634 # Check: Old-style partition infos should not be specified. 635 for _, part in common.CROS_PARTITIONS: 636 self._CheckElemNotPresent(manifest, 'old_%s_info' % part, 'manifest') 637 self._CheckElemNotPresent(manifest, 'new_%s_info' % part, 'manifest') 638 639 # Check: If old_partition_info is specified anywhere, it must be 640 # specified everywhere. 641 old_part_msgs = [part.msg for part in self.old_part_info.values() if part] 642 self._CheckPresentIffMany(old_part_msgs, 'old_partition_info', 643 'manifest.partitions') 644 645 is_delta = any(part and part.msg for part in self.old_part_info.values()) 646 if is_delta: 647 # Assert/mark delta payload. 648 if self.payload_type == _TYPE_FULL: 649 raise error.PayloadError( 650 'Apparent full payload contains old_{kernel,rootfs}_info.') 651 self.payload_type = _TYPE_DELTA 652 653 for part, (msg, part_report) in self.old_part_info.items(): 654 # Check: {size, hash} present in old_{kernel,rootfs}_info. 655 field = 'old_%s_info' % part 656 self.old_fs_sizes[part] = self._CheckMandatoryField(msg, 'size', 657 part_report, field) 658 self._CheckMandatoryField(msg, 'hash', part_report, field, 659 convert=common.FormatSha256) 660 661 # Check: old_{kernel,rootfs} size must fit in respective partition. 662 if self.old_fs_sizes[part] > part_sizes[part] > 0: 663 raise error.PayloadError( 664 'Old %s content (%d) exceed partition size (%d).' % 665 (part, self.old_fs_sizes[part], part_sizes[part])) 666 else: 667 # Assert/mark full payload. 668 if self.payload_type == _TYPE_DELTA: 669 raise error.PayloadError( 670 'Apparent delta payload missing old_{kernel,rootfs}_info.') 671 self.payload_type = _TYPE_FULL 672 673 # Check: new_{kernel,rootfs}_info present; contains {size, hash}. 674 for part, (msg, part_report) in self.new_part_info.items(): 675 field = 'new_%s_info' % part 676 self.new_fs_sizes[part] = self._CheckMandatoryField(msg, 'size', 677 part_report, field) 678 self._CheckMandatoryField(msg, 'hash', part_report, field, 679 convert=common.FormatSha256) 680 681 # Check: new_{kernel,rootfs} size must fit in respective partition. 682 if self.new_fs_sizes[part] > part_sizes[part] > 0: 683 raise error.PayloadError( 684 'New %s content (%d) exceed partition size (%d).' % 685 (part, self.new_fs_sizes[part], part_sizes[part])) 686 687 # Check: minor_version makes sense for the payload type. 
    # run after the payload type has been set.
    self._CheckManifestMinorVersion(report)

  def _CheckLength(self, length, total_blocks, op_name, length_name):
    """Checks whether a length matches the space designated in extents.

    Args:
      length: The total length of the data.
      total_blocks: The total number of blocks in extents.
      op_name: Operation name (for error reporting).
      length_name: Length name (for error reporting).

    Raises:
      error.PayloadError if there is a problem with the length.
    """
    # Check: length is non-zero.
    if length == 0:
      raise error.PayloadError('%s: %s is zero.' % (op_name, length_name))

    # Check that length matches number of blocks.
    self._CheckBlocksFitLength(length, total_blocks, self.block_size,
                               '%s: %s' % (op_name, length_name))

  def _CheckExtents(self, extents, usable_size, block_counters, name):
    """Checks a sequence of extents.

    Args:
      extents: The sequence of extents to check.
      usable_size: The usable size of the partition to which the extents apply.
      block_counters: Array of counters corresponding to the number of blocks.
      name: The name of the extent block.

    Returns:
      The total number of blocks in the extents.

    Raises:
      error.PayloadError if any of the entailed checks fails.
    """
    total_num_blocks = 0
    for ex, ex_name in common.ExtentIter(extents, name):
      # Check: Mandatory fields.
      start_block = PayloadChecker._CheckMandatoryField(ex, 'start_block',
                                                        None, ex_name)
      num_blocks = PayloadChecker._CheckMandatoryField(ex, 'num_blocks', None,
                                                       ex_name)
      end_block = start_block + num_blocks

      # Check: num_blocks > 0.
      if num_blocks == 0:
        raise error.PayloadError('%s: extent length is zero.' % ex_name)

      # Check: Make sure we're within the partition limit.
      if usable_size and end_block * self.block_size > usable_size:
        raise error.PayloadError(
            '%s: extent (%s) exceeds usable partition size (%d).' %
            (ex_name, common.FormatExtent(ex, self.block_size), usable_size))

      # Record block usage.
      for i in range(start_block, end_block):
        block_counters[i] += 1

      total_num_blocks += num_blocks

    return total_num_blocks

  def _CheckReplaceOperation(self, op, data_length, total_dst_blocks, op_name):
    """Specific checks for REPLACE/REPLACE_BZ/REPLACE_XZ operations.

    Args:
      op: The operation object from the manifest.
      data_length: The length of the data blob associated with the operation.
      total_dst_blocks: Total number of blocks in dst_extents.
      op_name: Operation name for error reporting.

    Raises:
      error.PayloadError if any check fails.
    """
    # Check: total_dst_blocks is not a floating point.
    if isinstance(total_dst_blocks, float):
      raise error.PayloadError('%s: contains invalid data type of '
                               'total_dst_blocks.' % op_name)

    # Check: Does not contain src extents.
    if op.src_extents:
      raise error.PayloadError('%s: contains src_extents.' % op_name)

    # Check: Contains data.
    if data_length is None:
      raise error.PayloadError('%s: missing data_{offset,length}.' % op_name)

    if op.type == common.OpType.REPLACE:
      PayloadChecker._CheckBlocksFitLength(data_length, total_dst_blocks,
                                           self.block_size,
                                           op_name + '.data_length', 'dst')
    else:
      # Check: data_length must be smaller than the allotted dst blocks.
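      # (For the compressed variants, a blob at least as large as the raw
      # destination space offers no saving and would presumably have been
      # emitted as a plain REPLACE instead.)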
      if data_length >= total_dst_blocks * self.block_size:
        raise error.PayloadError(
            '%s: data_length (%d) must be less than allotted dst block '
            'space (%d * %d).' %
            (op_name, data_length, total_dst_blocks, self.block_size))

  def _CheckZeroOperation(self, op, op_name):
    """Specific checks for ZERO operations.

    Args:
      op: The operation object from the manifest.
      op_name: Operation name for error reporting.

    Raises:
      error.PayloadError if any check fails.
    """
    # Check: Does not contain src extents, data_length and data_offset.
    if op.src_extents:
      raise error.PayloadError('%s: contains src_extents.' % op_name)
    if op.data_length:
      raise error.PayloadError('%s: contains data_length.' % op_name)
    if op.data_offset:
      raise error.PayloadError('%s: contains data_offset.' % op_name)

  def _CheckAnyDiffOperation(self, op, data_length, total_dst_blocks, op_name):
    """Specific checks for SOURCE_BSDIFF, PUFFDIFF and BROTLI_BSDIFF
    operations.

    Args:
      op: The operation.
      data_length: The length of the data blob associated with the operation.
      total_dst_blocks: Total number of blocks in dst_extents.
      op_name: Operation name for error reporting.

    Raises:
      error.PayloadError if any check fails.
    """
    # Check: data_{offset,length} present.
    if data_length is None:
      raise error.PayloadError('%s: missing data_{offset,length}.' % op_name)

    # Check: data_length is strictly smaller than the allotted dst blocks.
    if data_length >= total_dst_blocks * self.block_size:
      raise error.PayloadError(
          '%s: data_length (%d) must be smaller than allotted dst space '
          '(%d * %d = %d).' %
          (op_name, data_length, total_dst_blocks, self.block_size,
           total_dst_blocks * self.block_size))

    # Check the existence of src_length and dst_length for legacy bsdiffs.
    if op.type == common.OpType.SOURCE_BSDIFF and self.minor_version <= 3:
      if not op.HasField('src_length') or not op.HasField('dst_length'):
        raise error.PayloadError('%s: require {src,dst}_length.' % op_name)
    else:
      if op.HasField('src_length') or op.HasField('dst_length'):
        raise error.PayloadError('%s: unneeded {src,dst}_length.' % op_name)

  def _CheckSourceCopyOperation(self, data_offset, total_src_blocks,
                                total_dst_blocks, op_name):
    """Specific checks for SOURCE_COPY.

    Args:
      data_offset: The offset of a data blob for the operation.
      total_src_blocks: Total number of blocks in src_extents.
      total_dst_blocks: Total number of blocks in dst_extents.
      op_name: Operation name for error reporting.

    Raises:
      error.PayloadError if any check fails.
    """
    # Check: No data_{offset,length}.
    if data_offset is not None:
      raise error.PayloadError('%s: contains data_{offset,length}.' % op_name)

    # Check: total_src_blocks == total_dst_blocks.
    if total_src_blocks != total_dst_blocks:
      raise error.PayloadError(
          '%s: total src blocks (%d) != total dst blocks (%d).' %
          (op_name, total_src_blocks, total_dst_blocks))

  def _CheckAnySourceOperation(self, op, total_src_blocks, op_name):
    """Specific checks for SOURCE_* operations.

    Args:
      op: The operation object from the manifest.
      total_src_blocks: Total number of blocks in src_extents.
      op_name: Operation name for error reporting.

    Raises:
      error.PayloadError if any check fails.
    """
    # Check: total_src_blocks != 0.
    if total_src_blocks == 0:
      raise error.PayloadError('%s: no src blocks in a source op.' % op_name)

    # Check: src_sha256_hash present in minor version >= 3.
    if self.minor_version >= 3 and op.src_sha256_hash is None:
      raise error.PayloadError('%s: source hash missing.' % op_name)

  def _CheckOperation(self, op, op_name, old_block_counters, new_block_counters,
                      old_usable_size, new_usable_size, prev_data_offset,
                      blob_hash_counts):
    """Checks a single update operation.

    Args:
      op: The operation object.
      op_name: Operation name string for error reporting.
      old_block_counters: Arrays of block read counters.
      new_block_counters: Arrays of block write counters.
      old_usable_size: The overall usable size for src data in bytes.
      new_usable_size: The overall usable size for dst data in bytes.
      prev_data_offset: Offset of last used data bytes.
      blob_hash_counts: Counters for hashed/unhashed blobs.

    Returns:
      The amount of data blob associated with the operation.

    Raises:
      error.PayloadError if any check has failed.
    """
    # Check extents.
    total_src_blocks = self._CheckExtents(
        op.src_extents, old_usable_size, old_block_counters,
        op_name + '.src_extents')
    total_dst_blocks = self._CheckExtents(
        op.dst_extents, new_usable_size, new_block_counters,
        op_name + '.dst_extents')

    # Check: data_offset present <==> data_length present.
    data_offset = self._CheckOptionalField(op, 'data_offset', None)
    data_length = self._CheckOptionalField(op, 'data_length', None)
    self._CheckPresentIff(data_offset, data_length, 'data_offset',
                          'data_length', op_name)

    # Check: At least one dst_extent.
    if not op.dst_extents:
      raise error.PayloadError('%s: dst_extents is empty.' % op_name)

    # Check {src,dst}_length, if present.
    if op.HasField('src_length'):
      self._CheckLength(op.src_length, total_src_blocks, op_name, 'src_length')
    if op.HasField('dst_length'):
      self._CheckLength(op.dst_length, total_dst_blocks, op_name, 'dst_length')

    if op.HasField('data_sha256_hash'):
      blob_hash_counts['hashed'] += 1

      # Check: Operation carries data.
      if data_offset is None:
        raise error.PayloadError(
            '%s: data_sha256_hash present but no data_{offset,length}.' %
            op_name)

      # Check: Hash verifies correctly.
      actual_hash = hashlib.sha256(self.payload.ReadDataBlob(data_offset,
                                                             data_length))
      if op.data_sha256_hash != actual_hash.digest():
        raise error.PayloadError(
            '%s: data_sha256_hash (%s) does not match actual hash (%s).' %
            (op_name, common.FormatSha256(op.data_sha256_hash),
             common.FormatSha256(actual_hash.digest())))
    elif data_offset is not None:
      if self.allow_unhashed:
        blob_hash_counts['unhashed'] += 1
      else:
        raise error.PayloadError('%s: unhashed operation not allowed.' %
                                 op_name)

    if data_offset is not None:
      # Check: Contiguous use of data section.
      if data_offset != prev_data_offset:
        raise error.PayloadError(
            '%s: data offset (%d) not matching amount used so far (%d).' %
            (op_name, data_offset, prev_data_offset))

    # Type-specific checks.
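    # (Each branch admits an operation type only from the minor version that
    # introduced it; a known type used with too old a minor version falls
    # through to the error below.)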
    if op.type in (common.OpType.REPLACE, common.OpType.REPLACE_BZ,
                   common.OpType.REPLACE_XZ):
      self._CheckReplaceOperation(op, data_length, total_dst_blocks, op_name)
    elif op.type == common.OpType.ZERO and self.minor_version >= 4:
      self._CheckZeroOperation(op, op_name)
    elif op.type == common.OpType.SOURCE_COPY and self.minor_version >= 2:
      self._CheckSourceCopyOperation(data_offset, total_src_blocks,
                                     total_dst_blocks, op_name)
      self._CheckAnySourceOperation(op, total_src_blocks, op_name)
    elif op.type == common.OpType.SOURCE_BSDIFF and self.minor_version >= 2:
      self._CheckAnyDiffOperation(op, data_length, total_dst_blocks, op_name)
      self._CheckAnySourceOperation(op, total_src_blocks, op_name)
    elif op.type == common.OpType.BROTLI_BSDIFF and self.minor_version >= 4:
      self._CheckAnyDiffOperation(op, data_length, total_dst_blocks, op_name)
      self._CheckAnySourceOperation(op, total_src_blocks, op_name)
    elif op.type == common.OpType.PUFFDIFF and self.minor_version >= 5:
      self._CheckAnyDiffOperation(op, data_length, total_dst_blocks, op_name)
      self._CheckAnySourceOperation(op, total_src_blocks, op_name)
    else:
      raise error.PayloadError(
          'Operation %s (type %d) not allowed in minor version %d' %
          (op_name, op.type, self.minor_version))
    return data_length if data_length is not None else 0

  def _SizeToNumBlocks(self, size):
    """Returns the number of blocks needed to contain a given byte size."""
    return (size + self.block_size - 1) // self.block_size

  def _AllocBlockCounters(self, total_size):
    """Returns a freshly initialized array of block counters.

    Note that the generated array is not portable as is due to byte-ordering
    issues, hence it should not be serialized.

    Args:
      total_size: The total block size in bytes.

    Returns:
      An array of unsigned short elements initialized to zero, one for each of
      the blocks necessary for containing the partition.
    """
    return array.array('H',
                       itertools.repeat(0, self._SizeToNumBlocks(total_size)))

  def _CheckOperations(self, operations, report, base_name, old_fs_size,
                       new_fs_size, old_usable_size, new_usable_size,
                       prev_data_offset):
    """Checks a sequence of update operations.

    Args:
      operations: The sequence of operations to check.
      report: The report object to add to.
      base_name: The name of the operation block.
      old_fs_size: The old filesystem size in bytes.
      new_fs_size: The new filesystem size in bytes.
      old_usable_size: The overall usable size of the old partition in bytes.
      new_usable_size: The overall usable size of the new partition in bytes.
      prev_data_offset: Offset of last used data bytes.

    Returns:
      The total data blob size used.

    Raises:
      error.PayloadError if any of the checks fails.
    """
    # The total size of data blobs used by operations scanned thus far.
    total_data_used = 0
    # Counts of specific operation types.
    op_counts = {
        common.OpType.REPLACE: 0,
        common.OpType.REPLACE_BZ: 0,
        common.OpType.REPLACE_XZ: 0,
        common.OpType.ZERO: 0,
        common.OpType.SOURCE_COPY: 0,
        common.OpType.SOURCE_BSDIFF: 0,
        common.OpType.PUFFDIFF: 0,
        common.OpType.BROTLI_BSDIFF: 0,
    }
    # Total blob sizes for each operation type.
    op_blob_totals = {
        common.OpType.REPLACE: 0,
        common.OpType.REPLACE_BZ: 0,
        common.OpType.REPLACE_XZ: 0,
        # SOURCE_COPY operations don't have blobs.
        common.OpType.SOURCE_BSDIFF: 0,
        common.OpType.PUFFDIFF: 0,
        common.OpType.BROTLI_BSDIFF: 0,
    }
    # Counts of hashed vs unhashed operations.
    blob_hash_counts = {
        'hashed': 0,
        'unhashed': 0,
    }

    # Allocate old and new block counters.
    old_block_counters = (self._AllocBlockCounters(old_usable_size)
                          if old_fs_size else None)
    new_block_counters = self._AllocBlockCounters(new_usable_size)

    # Process and verify each operation.
    op_num = 0
    for op, op_name in common.OperationIter(operations, base_name):
      op_num += 1

      # Check: Type is valid.
      if op.type not in op_counts:
        raise error.PayloadError('%s: invalid type (%d).' % (op_name, op.type))
      op_counts[op.type] += 1

      curr_data_used = self._CheckOperation(
          op, op_name, old_block_counters, new_block_counters,
          old_usable_size, new_usable_size,
          prev_data_offset + total_data_used, blob_hash_counts)
      if curr_data_used:
        op_blob_totals[op.type] += curr_data_used
        total_data_used += curr_data_used

    # Report totals and breakdown statistics.
    report.AddField('total operations', op_num)
    report.AddField(
        None,
        histogram.Histogram.FromCountDict(op_counts,
                                          key_names=common.OpType.NAMES),
        indent=1)
    report.AddField('total blobs', sum(blob_hash_counts.values()))
    report.AddField(None,
                    histogram.Histogram.FromCountDict(blob_hash_counts),
                    indent=1)
    report.AddField('total blob size', _AddHumanReadableSize(total_data_used))
    report.AddField(
        None,
        histogram.Histogram.FromCountDict(op_blob_totals,
                                          formatter=_AddHumanReadableSize,
                                          key_names=common.OpType.NAMES),
        indent=1)

    # Report read/write histograms.
    if old_block_counters:
      report.AddField('block read hist',
                      histogram.Histogram.FromKeyList(old_block_counters),
                      linebreak=True, indent=1)

    new_write_hist = histogram.Histogram.FromKeyList(
        new_block_counters[:self._SizeToNumBlocks(new_fs_size)])
    report.AddField('block write hist', new_write_hist, linebreak=True,
                    indent=1)

    # Check: Full update must write each dst block once.
    if self.payload_type == _TYPE_FULL and new_write_hist.GetKeys() != [1]:
      raise error.PayloadError(
          '%s: not all blocks written exactly once during full update.' %
          base_name)

    return total_data_used

  def _CheckSignatures(self, report, pubkey_file_name):
    """Checks a payload's signature block."""
    sigs_raw = self.payload.ReadDataBlob(self.sigs_offset, self.sigs_size)
    sigs = update_metadata_pb2.Signatures()
    sigs.ParseFromString(sigs_raw)
    report.AddSection('signatures')

    # Check: At least one signature present.
    if not sigs.signatures:
      raise error.PayloadError('Signature block is empty.')

    # Check that we don't have the signature operation blob at the end (used
    # to be for major version 1).
    last_partition = self.payload.manifest.partitions[-1]
    if last_partition.operations:
      last_op = last_partition.operations[-1]
      # Check: signatures_{offset,size} must match the last (fake) operation.
      if (last_op.type == common.OpType.REPLACE and
          last_op.data_offset == self.sigs_offset and
          last_op.data_length == self.sigs_size):
        raise error.PayloadError('It seems like the last operation is the '
                                 'signature blob. This is an invalid payload.')

    # Compute the checksum of all data up to signature blob.
    # TODO(garnold) we're re-reading the whole data section into a string
    # just to compute the checksum; instead, we could do it incrementally as
    # we read the blobs one-by-one, under the assumption that we're reading
    # them in order (which currently holds). This should be reconsidered.
    payload_hasher = self.payload.manifest_hasher.copy()
    common.Read(self.payload.payload_file, self.sigs_offset,
                offset=self.payload.data_offset, hasher=payload_hasher)

    for sig, sig_name in common.SignatureIter(sigs.signatures, 'signatures'):
      sig_report = report.AddSubReport(sig_name)

      # Check: Signature contains mandatory fields.
      self._CheckMandatoryField(sig, 'version', sig_report, sig_name)
      self._CheckMandatoryField(sig, 'data', None, sig_name)
      sig_report.AddField('data len', len(sig.data))

      # Check: Signature pertains to actual payload hash.
      if sig.version == 1:
        self._CheckSha256Signature(sig.data, pubkey_file_name,
                                   payload_hasher.digest(), sig_name)
      else:
        raise error.PayloadError('Unknown signature version (%d).' %
                                 sig.version)

  def Run(self, pubkey_file_name=None, metadata_sig_file=None, metadata_size=0,
          part_sizes=None, report_out_file=None):
    """Checker entry point, invoking all checks.

    Args:
      pubkey_file_name: Public key used for signature verification.
      metadata_sig_file: Metadata signature, if verification is desired.
      metadata_size: Metadata size, if verification is desired.
      part_sizes: Mapping of partition label to size in bytes (default: infer
          based on payload type and version or filesystem).
      report_out_file: File object to dump the report to.

    Raises:
      error.PayloadError if payload verification failed.
    """
    if not pubkey_file_name:
      pubkey_file_name = _DEFAULT_PUBKEY_FILE_NAME

    report = _PayloadReport()

    # Get payload file size.
    self.payload.payload_file.seek(0, 2)
    payload_file_size = self.payload.payload_file.tell()
    self.payload.ResetFile()

    try:
      # Check metadata_size (if provided).
      if metadata_size and self.payload.metadata_size != metadata_size:
        raise error.PayloadError('Invalid metadata size: in payload (%d) '
                                 'vs given (%d).' %
                                 (self.payload.metadata_size, metadata_size))

      # Check metadata signature (if provided).
      if metadata_sig_file:
        metadata_sig = base64.b64decode(metadata_sig_file.read())
        self._CheckSha256Signature(metadata_sig, pubkey_file_name,
                                   self.payload.manifest_hasher.digest(),
                                   'metadata signature')

      # Part 1: Check the file header.
      report.AddSection('header')
      # Check: Payload version is valid.
      if self.payload.header.version not in (1, 2):
        raise error.PayloadError('Unknown payload version (%d).' %
                                 self.payload.header.version)
      report.AddField('version', self.payload.header.version)
      report.AddField('manifest len', self.payload.header.manifest_len)

      # Part 2: Check the manifest.
      self._CheckManifest(report, part_sizes)
      assert self.payload_type, 'payload type should be known by now'

      # Make sure deprecated values are not present in the payload.
      for field in ('install_operations', 'kernel_install_operations'):
        self._CheckRepeatedElemNotPresent(self.payload.manifest, field,
                                          'manifest')
      for field in ('old_kernel_info', 'old_rootfs_info',
                    'new_kernel_info', 'new_rootfs_info'):
        self._CheckElemNotPresent(self.payload.manifest, field, 'manifest')

      # Part 3: Check the operations for each partition.
      total_blob_size = 0
      for part, operations in ((p.partition_name, p.operations)
                               for p in self.payload.manifest.partitions):
        report.AddSection('%s operations' % part)

        new_fs_usable_size = self.new_fs_sizes[part]
        old_fs_usable_size = self.old_fs_sizes[part]

        if part_sizes is not None and part_sizes.get(part, None):
          new_fs_usable_size = old_fs_usable_size = part_sizes[part]

        # TODO(chromium:243559) only default to the filesystem size if no
        # explicit size provided *and* the partition size is not embedded in
        # the payload; see issue for more details.
        total_blob_size += self._CheckOperations(
            operations, report, '%s_install_operations' % part,
            self.old_fs_sizes[part], self.new_fs_sizes[part],
            old_fs_usable_size, new_fs_usable_size, total_blob_size)

      # Check: Operations data reach the end of the payload file.
      used_payload_size = self.payload.data_offset + total_blob_size
      # Major versions 2 and higher have a signature at the end, so it should
      # be considered in the total size of the image.
      if self.sigs_size:
        used_payload_size += self.sigs_size

      if used_payload_size != payload_file_size:
        raise error.PayloadError(
            'Used payload size (%d) different from actual file size (%d).' %
            (used_payload_size, payload_file_size))

      # Part 4: Handle payload signatures message.
      if self.check_payload_sig and self.sigs_size:
        self._CheckSignatures(report, pubkey_file_name)

      # Part 5: Summary.
      report.AddSection('summary')
      report.AddField('update type', self.payload_type)

      report.Finalize()
    finally:
      if report_out_file:
        report.Dump(report_out_file)
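
# Example usage (a minimal sketch based on the module docstring; the
# update_payload.Payload entry point and the file names are illustrative
# assumptions, not defined in this module):
#
#   import sys
#   from update_payload import Payload
#
#   with open('payload.bin', 'rb') as payload_file:
#     payload = Payload(payload_file)
#     payload.Init()
#     checker = PayloadChecker(payload, assert_type='delta')
#     checker.Run(report_out_file=sys.stdout)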