Migrate python2 scripts to python3
Tests Done: Boot check on civ and base_aaos

Tracked-On: OAM-124331
Signed-off-by: Ankit Agrawal <[email protected]>
Signed-off-by: Salini Venate <[email protected]>
ankithbti52509 authored and SaliniVenate committed Sep 5, 2024
1 parent 574c7a3 commit 320641b
Showing 8 changed files with 104 additions and 99 deletions.
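Most of the hunks below apply the same handful of mechanical Python 2 to Python 3 conversions. As a quick reference, here is a minimal, self-contained sketch of the purely syntactic ones (print as a function, except ... as, and 0o octal literals); the names in it are illustrative and do not come from the changed files.

# Illustrative only: the recurring syntax changes this commit applies.
import sys

def show_python3_syntax():
    # print is a function in Python 3 and can write straight to a chosen stream
    print("hello from python3", file=sys.stderr)

    # exception handlers use "as" instead of the Python 2 comma form
    try:
        raise ValueError("boom")
    except ValueError as err:
        print("ERROR: %s" % (err,))

    # octal literals need the 0o prefix (0751 is a syntax error in Python 3)
    perms = 0o751
    assert perms == 489

if __name__ == "__main__":
    show_python3_syntax()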
8 changes: 4 additions & 4 deletions bootloader_from_zip
@@ -83,12 +83,12 @@ def main(argv):
sys.exit(1)

if not OPTIONS.zipfile:
print "--zipfile is required"
print ("--zipfile is required")
common.Usage(__doc__)
sys.exit(1)

tf = tempfile.NamedTemporaryFile()
tf.write("foo")
tf.write(b"foo")
tf.flush()

extra_files = OPTIONS.bootimage
@@ -106,9 +106,9 @@ if __name__ == '__main__':
try:
common.CloseInheritedPipes()
main(sys.argv[1:])
except common.ExternalError, e:
except common.ExternalError as e:
print
print " ERROR: %s" % (e,)
print (" ERROR: %s" % (e,))
print
sys.exit(1)
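The tf.write(b"foo") change above is forced by the bytes/str split: NamedTemporaryFile opens in binary mode ('w+b') by default, and binary files in Python 3 accept only bytes. A small sketch of the behaviour, using nothing beyond the standard library:

import tempfile

with tempfile.NamedTemporaryFile() as tf:   # default mode is 'w+b'
    try:
        tf.write("foo")                     # accepted by Python 2, TypeError in Python 3
    except TypeError as err:
        print("str rejected by binary file: %s" % (err,))
    tf.write(b"foo")                        # bytes are required in binary mode
    tf.flush()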

60 changes: 28 additions & 32 deletions create_gpt_image.py
@@ -17,10 +17,10 @@
Script to create a GPT/UEFI image or to show information it contains.
"""

from sys import version_info
from sys import exit, version_info

if version_info < (2, 7, 3):
exit('Python version must be 2.7.3 or higher')
if version_info < (3, 0):
exit('Python version must be 3.0 or higher')

from logging import (debug, info, error, DEBUG, INFO, getLogger,
basicConfig)
@@ -32,10 +32,7 @@
from binascii import crc32
from re import compile as re_compile
from collections import namedtuple
if version_info < (3, 0, 1):
from ConfigParser import SafeConfigParser, ParsingError, NoOptionError, NoSectionError
else:
from configparser import SafeConfigParser, ParsingError, NoOptionError, NoSectionError
from configparser import ConfigParser, ParsingError, NoOptionError, NoSectionError
from math import floor, log


@@ -69,12 +66,11 @@ class MBRInfos(object):

_FMT = '<IIII430s16s48s2s'

_PART_ENTRY = ('\x00\x00\x00\x00\xee\x00\x00\x00\x01\x00\x00\x00\x00\x00'
'\xee\x00')
_PART_ENTRY = b'\x00\x00\x00\x00\xee\x00\x00\x00\x01\x00\x00\x00\x00\x00\xee\x00'

def __init__(self, block_size=512):
self.block_size = block_size
self.raw = ''
self.raw = b''

# TODO use decorators and properties to subtitute by r/w access in the
# raw attribute with pack and unpack function all these attributes
@@ -83,17 +79,17 @@ def __init__(self, block_size=512):
self.os_type = 0
self.lba_start = 0
self.lba_size = 0
self.dummy_1 = ''
self.dummy_2 = ''
self.dummy_3 = ''
self.sign = '\x55\xaa'
self.dummy_1 = b''
self.dummy_2 = b''
self.dummy_3 = b''
self.sign = b'\x55\xaa'

def __repr__(self):
# converts the size
if self.lba_size > 0:
units = ('KBytes', 'MBytes', 'GBytes')
index = int(floor(log(self.lba_size, 1024)))
computed_size = round(self.lba_size / (1024**index), 2)
computed_size = round(self.lba_size // (1024**index), 2)
human_size = '{0} {1}'.format(computed_size, units[index])
else:
human_size = '0 Bytes'
@@ -135,8 +131,8 @@ def write(self, img_file, offset=0):
Used to write MBR in an image file
"""
self.raw = pack(MBRInfos._FMT, self.boot, self.os_type,
self.lba_start, self.lba_size, '',
MBRInfos._PART_ENTRY, '', self.sign)
self.lba_start, self.lba_size, b'\x00',
MBRInfos._PART_ENTRY, b'\x00', self.sign)
img_file.seek(offset)
img_file.write(self.raw)

@@ -199,12 +195,12 @@ class GPTHeaderInfos(object):
_FMT = '<8s4sII4xQQQQ16sQIII'

def __init__(self, img_size=2147483648, block_size=512, size=92):
self.raw = ''
self.raw = b''

# TODO use decorators and properties to subtitute by r/w access in the
# raw attribute with pack and unpack function all these attributes
self.sign = 'EFI PART'
self.rev = '\x00\x00\x01\x00'
self.sign = b'EFI PART'
self.rev = b'\x00\x00\x01\x00'
self.size = size

# sets the length and the entry size of the GPT partition table with
@@ -213,13 +209,13 @@ def __init__(self, img_size=2147483648, block_size=512, size=92):
self.entry_size = 128

# calculates the size of image in block
size_in_block = img_size / block_size
size_in_block = img_size // block_size

# sets the lba backup at the value of first lba used by GPT backup
self.lba_backup = size_in_block - 1

# calculates the size of the partition table in block
table_size = (self.table_length * self.entry_size) / block_size
table_size = (self.table_length * self.entry_size) // block_size

# sets the lba first at the first usable lba for a partition
self.lba_first = table_size + 2
@@ -300,7 +296,7 @@ def write(self, img_file, offset, block_size):
img_file.write(self.raw)

# writes zero on unused blocks of GPT header
raw_stuffing = '\x00' * (block_size - len(self.raw))
raw_stuffing = b'\x00' * (block_size - len(self.raw))
img_file.write(raw_stuffing)

# saves the end of the GPT header
@@ -598,7 +594,7 @@ def _read_json(self, block_size):
"""
Used to read a JSON TLB partition file
"""
with open(self.path, 'r') as tlb_file:
with open(self.path, 'rb') as tlb_file:
re_parser = re_compile(r'^add\s-b\s(?P<begin>\w+)\s-s\s'
'(?P<size>[\w$()-]+)\s-t\s'
'(?P<type>\w+)\s-u\s'
@@ -690,7 +686,7 @@ def _contruct_tlb_info(self, start_lba, cfg, block_size, parts):
readlen = cfg.getint(partname, 'len')

if readlen > 0:
size = (readlen * 1024 * 1024) / block_size
size = (readlen * 1024 * 1024) // block_size
start_lba = begin + size
else:
size = readlen
@@ -724,7 +720,7 @@ def _contruct_tlb_grp_info(self, start_lba, cfg, block_size, parts):
begin = start_lba

if readlen > 0:
size = (readlen * 1024 * 1024) / block_size
size = (readlen * 1024 * 1024) // block_size
start_lba = begin + size
else:
size = readlen
@@ -738,7 +734,7 @@ def _read_ini(self, block_size):
Used to read a INI TLB partition file
"""
# sets a parser to read the INI TLB partition file
cfg = SafeConfigParser()
cfg = ConfigParser(strict=False)
try:
cfg.read(self.path)

@@ -757,7 +753,7 @@ def _read_ini(self, block_size):

except NoOptionError:
# set start lba to offset 1M bytes, align with kernelflinger
start_lba_prev = 1024 * 1024 / block_size
start_lba_prev = 1024 * 1024 // block_size
info('The start_lba value is undefined in the TLB partition file,'
' the default value is used: {0}'.format(start_lba_prev))

@@ -802,7 +798,7 @@ def compute_last_size_entry(self, img_size, block_size, entry_size,
last = -1
# reserve the size for primary and secondary gpt
MB = 1024 * 1024
remaining_size = (img_size - MB) / block_size - 2048
remaining_size = (img_size - MB) // block_size - 2048
for pos, entry in enumerate(self):
debug('Entry size: {0}'.format(entry.size))
if entry.size < 0:
@@ -1035,7 +1031,7 @@ def _write_partitions(self, img_file, tlb_infos, binaries_path):
# no binary file used to build the partition or slot_b case
label = tlb_part.label[0:]
if bin_path == 'none' or label[len(label)-2:] == '_b':
line = '\0'
line = b'\0'
img_file.seek(offset)
img_file.write(line)
bin_size = 0
@@ -1048,7 +1044,7 @@ def _write_partitions(self, img_file, tlb_infos, binaries_path):
# checks if partition size is greather or equal to the binary file
bin_size_in_bytes = stat(bin_path).st_size
part_size_in_bytes = tlb_part.size * self.block_size
bin_size = bin_size_in_bytes / self.block_size
bin_size = bin_size_in_bytes // self.block_size
if tlb_part.size < bin_size:
error('Size of binary file {0} ({1} Bytes) is greather than '
'{2} partition size ({3} Bytes)'.format(bin_path,
@@ -1081,7 +1077,7 @@ def write(self, tlb_infos, binaries_path):

# fill output image header with 0x00: MBR size + GPT header size +
# (partition table length * entry size)
zero = '\x00' * (2 * self.block_size +
zero = b'\x00' * (2 * self.block_size +
self.gpt_header.table_length *
self.gpt_header.entry_size)
img_file.seek(0)
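Two patterns dominate the create_gpt_image.py hunks: raw buffers and struct 's' fields must be bytes in Python 3, and the LBA/size arithmetic moves from / to // so it keeps yielding integers. A minimal sketch with a made-up field layout (not the real MBR/GPT format strings):

from struct import calcsize, pack

FMT = '<I4s2s'                                 # hypothetical layout: 4 + 4 + 2 bytes
raw = pack(FMT, 1, b'\x00', b'\x55\xaa')       # b'' padding instead of '' (str would raise)
assert len(raw) == calcsize(FMT) == 10

img_size, block_size = 2147483648, 512
size_in_block = img_size // block_size         # '/' would return a float in Python 3
assert isinstance(size_in_block, int) and size_in_block == 4194304

The ConfigParser(strict=False) import plays a similar compatibility role: SafeConfigParser survives only as a deprecated alias in Python 3 and is removed in recent releases, while strict=False keeps the old parser's tolerance of duplicate sections and options.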
60 changes: 30 additions & 30 deletions generate_factory_images
@@ -32,7 +32,7 @@ import os

_FLASHALL_FILENAME = "flash-all.sh"
# chmod (octal) -rwxr-x--x
_PERMS = 0751
_PERMS = 0o751
_FLASH_HEADER = """#!/bin/bash
# Copyright 2012 The Android Open Source Project
@@ -103,30 +103,30 @@ def ConvertToDOSFormat(filename):


def AddFlashScript(filename, tar, commands, windows):
print "Archiving", filename
tf = tempfile.NamedTemporaryFile(delete=False)
if (windows):
tf.write(_WIN_FLASH_HEADER)
else:
tf.write(_FLASH_HEADER)

for c in commands:
if windows:
tf.write(c.get_windows_command())
print("Archiving", filename)
with tempfile.NamedTemporaryFile(delete=False, mode='w+') as tf:
if (windows):
tf.write(_WIN_FLASH_HEADER)
else:
tf.write(c.get_linux_command())
tf.write(_FLASH_HEADER)

if (windows):
tf.write(_WIN_FLASH_FOOTER)
else:
tf.write(_FLASH_FOOTER)
for c in commands:
if windows:
tf.write(c.get_windows_command())
else:
tf.write(c.get_linux_command())

if (windows):
tf.write(_WIN_FLASH_FOOTER)
else:
tf.write(_FLASH_FOOTER)

tf.close()
tf_name = tf.name
if (windows):
ConvertToDOSFormat(tf.name)
chmod(tf.name, _PERMS)
tar.add(tf.name, arcname=path.basename(filename))
os.unlink(tf.name)
ConvertToDOSFormat(tf_name)
chmod(tf_name, _PERMS)
tar.add(tf_name, arcname=path.basename(filename))
os.unlink(tf_name)

def RequireFile(filename):
"""Ensure file exists"""
@@ -146,8 +146,8 @@ class CommandlineParser(ArgumentParser):
self.description = __doc__

def error(self, message):
print >>stderr, "ERROR: {}".format(message)
print >>stderr, "\n------\n"
print("ERROR: {}".format(message), file=stderr)
print("\n------\n", file=stderr)
self.print_help()
exit(2)

@@ -230,31 +230,31 @@ def main():
archive_name = args.output

# Create Archive
print "Creating archive: " + archive_name
print("Creating archive: " + archive_name)
tar = TarOpen(archive_name, "w:gz")

for src_path, dst_path in files:
print "Archiving " + src_path
print("Archiving " + src_path)
RequireFile(src_path)
tar.add(src_path, arcname=dst_path)

# 'fastboot update' covers the additional AOSP pieces, add this to the
# command list now
commands.append(UpdateCommand(update_fn, True))
print "Archiving " + args.update_archive
print("Archiving " + args.update_archive)
RequireFile(args.update_archive)
tar.add(args.update_archive, update_fn)
AddFlashScript(_FLASHALL_FILENAME, tar, commands, windows=False)
AddFlashScript(_WIN_FLASHALL_FILENAME, tar, commands, windows=True)

tar.close()

print "Done."
print("Done.")

if __name__ == "__main__":
try:
exit(main())
except Usage, err:
print >>stderr, "ERROR: {}".format(err.msg)
print >>stderr, " for help use --help"
except Usage as err:
print("ERROR: {}".format(err.msg), file=stderr)
print(" for help use --help", file=stderr)
exit(2)
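The AddFlashScript rewrite above combines several of the same fixes: the temporary file is opened in text mode ('w+') so the str script content can still be written, a with block replaces the manual close, and the saved name (delete=False keeps the file on disk) is reused for the chmod/tar/unlink steps. A stripped-down sketch of that pattern with a placeholder script body:

import os
import tempfile

_PERMS = 0o751                                   # chmod (octal) -rwxr-x--x

with tempfile.NamedTemporaryFile(delete=False, mode='w+') as tf:
    tf.write("#!/bin/bash\necho flashing\n")     # str is fine in text mode
tf_name = tf.name                                # the file persists after the with block
os.chmod(tf_name, _PERMS)
# tar.add(tf_name, arcname=...) would go here in the real script
os.unlink(tf_name)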
6 changes: 3 additions & 3 deletions releasetools/bootloader_from_target_files
@@ -61,7 +61,7 @@ def main(argv):
common.Usage(__doc__)
sys.exit(1)

print "unzipping target-files..."
print ("unzipping target-files...")
#OPTIONS.input_tmp = common.UnzipTemp(args[0])
OPTIONS.input_tmp = args[0]
#input_zip = zipfile.ZipFile(args[0], "r")
@@ -89,9 +89,9 @@ if __name__ == '__main__':
try:
common.CloseInheritedPipes()
main(sys.argv[1:])
except common.ExternalError, e:
except common.ExternalError as e:
print
print " ERROR: %s" % (e,)
print (" ERROR: %s" % (e,))
print
sys.exit(1)
finally:
9 changes: 8 additions & 1 deletion releasetools/flash_cmd_generator.py
@@ -330,7 +330,7 @@ def parse_config(ips, variant, platform):
results_list = []
for k,v in results.items():
results_list.append((k,v))
flist = [f.rsplit(':', 1) for f in set(files)]
unique_files = []
for file in files:
# If the file is not already in the unique_files list, add it
if file not in unique_files:
unique_files.append(file)

flist = [f.rsplit(':', 1) for f in unique_files]

return results_list, flist
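The loop added above deduplicates files while keeping first-seen order, which the previous set(files) did not guarantee. For reference (this is not what the commit uses), dict.fromkeys gives the same order-preserving deduplication on Python 3.7 and later; the entries below are made up:

files = ["boot:boot.img", "system:system.img", "boot:boot.img"]

unique_files = list(dict.fromkeys(files))        # keeps the first occurrence of each entry
assert unique_files == ["boot:boot.img", "system:system.img"]

flist = [f.rsplit(':', 1) for f in unique_files]
assert flist == [["boot", "boot.img"], ["system", "system.img"]]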

