Import of the watch repository from Pebble
tools/.gitignore (vendored, new file, 1 line)
@@ -0,0 +1 @@
serial_dump.txt

tools/__init__.py (new file, 14 lines)
@@ -0,0 +1,14 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

tools/accessory_flash_imaging.py (new file, 90 lines)
@@ -0,0 +1,90 @@
#!/usr/bin/env python
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


from __future__ import print_function

import argparse

from accessory_imaging import AccessoryImaging
import insert_firmware_descr


def flash_firmware(tty, path, progress):
    dev = AccessoryImaging(tty)
    image = insert_firmware_descr.insert_firmware_description_struct(path)
    dev.flash_image(image, dev.Frame.REGION_FW_SCRATCH, progress)


def flash_prf(tty, path, progress):
    dev = AccessoryImaging(tty)
    image = insert_firmware_descr.insert_firmware_description_struct(path)
    dev.flash_image(image, dev.Frame.REGION_PRF, progress)


def flash_resources(tty, path, progress):
    dev = AccessoryImaging(tty)
    with open(path, 'rb') as inf:
        image = inf.read()
    dev.flash_image(image, dev.Frame.REGION_RESOURCES, progress)


def read_pfs(tty, path, progress):
    dev = AccessoryImaging(tty)
    with open(path, 'wb') as output_file:
        data = dev.flash_read(dev.Frame.REGION_PFS, progress)
        output_file.write("".join(data))


def write_pfs(tty, path, progress):
    dev = AccessoryImaging(tty)
    with open(path, 'rb') as input_file:
        data = input_file.read()
    dev.flash_image(data, dev.Frame.REGION_PFS, progress)


def read_coredump(tty, path, progress):
    dev = AccessoryImaging(tty)
    with open(path, 'wb') as output_file:
        data = dev.flash_read(dev.Frame.REGION_COREDUMP, progress)
        output_file.write("".join(data))


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='A factory tool to load binary data into '
                                                 'Pebble\'s external flash storage over the '
                                                 'accessory port.')
    parser.add_argument('type', choices=['prf', 'firmware', 'resources', 'read_pfs', 'write_pfs',
                                         'read_coredump'],
                        help='The type of binary being loaded')
    parser.add_argument('tty', help='The target serial port')
    parser.add_argument('path', help='Path to the binary to be loaded or the file to save flash')

    args = parser.parse_args()

    if args.type == 'prf':
        flash_prf(args.tty, args.path, True)
    elif args.type == 'firmware':
        flash_firmware(args.tty, args.path, True)
    elif args.type == 'resources':
        flash_resources(args.tty, args.path, True)
    elif args.type == 'read_pfs':
        read_pfs(args.tty, args.path, True)
    elif args.type == 'write_pfs':
        write_pfs(args.tty, args.path, True)
    elif args.type == 'read_coredump':
        read_coredump(args.tty, args.path, True)
    else:
        assert False, 'This should never happen'
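For context, these helpers can also be driven directly from Python rather than through the CLI. A minimal sketch (the serial device and file paths are hypothetical placeholders):

    from accessory_flash_imaging import flash_firmware, read_pfs

    # Hypothetical device path and file names, for illustration only.
    flash_firmware('/dev/ttyUSB0', 'build/normal_firmware.bin', True)
    read_pfs('/dev/ttyUSB0', 'pfs_backup.bin', True)
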

tools/accessory_imaging.py (new file, 339 lines)
@@ -0,0 +1,339 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import struct
import time
from binascii import crc32
from random import randint

from hdlc import HDLCDecoder, hdlc_encode_data
from serial_port_wrapper import SerialPortWrapper


CRC_RESIDUE = crc32('\0\0\0\0')
READ_TIMEOUT = 1
ACCESSORY_CONSOLE_BAUD_RATE = 115200
ACCESSORY_IMAGING_BAUD_RATE = 921600


class AccessoryImagingError(Exception):
    pass


class AccessoryImaging(object):
    class Frame(object):
        MAX_DATA_LENGTH = 1024

        FLAG_IS_SERVER = (1 << 0)
        FLAG_VERSION = (0b111 << 1)

        OPCODE_PING = 0x01
        OPCODE_DISCONNECT = 0x02
        OPCODE_RESET = 0x03
        OPCODE_FLASH_GEOMETRY = 0x11
        OPCODE_FLASH_ERASE = 0x12
        OPCODE_FLASH_WRITE = 0x13
        OPCODE_FLASH_CRC = 0x14
        OPCODE_FLASH_FINALIZE = 0x15
        OPCODE_FLASH_READ = 0x16

        REGION_PRF = 0x01
        REGION_RESOURCES = 0x02
        REGION_FW_SCRATCH = 0x03
        REGION_PFS = 0x04
        REGION_COREDUMP = 0x05

        FLASH_READ_FLAG_ALL_SAME = (1 << 0)

        def __init__(self, raw_data):
            self._data = raw_data

        def __repr__(self):
            if self.is_valid():
                return '<{}@{:#x}: opcode={}>' \
                    .format(self.__class__.__name__, id(self), self.get_opcode())
            else:
                return '<{}@{:#x}: INVALID>' \
                    .format(self.__class__.__name__, id(self))

        def is_valid(self):
            # minimum packet size is 6 (2 bytes of header and 4 bytes of checksum)
            return self._data and len(self._data) >= 6 and crc32(self._data) == CRC_RESIDUE

        def flag_is_server(self):
            return bool(ord(self._data[0]) & self.FLAG_IS_SERVER)

        def flag_version(self):
            return (ord(self._data[0]) & self.FLAG_VERSION) >> 1

        def get_opcode(self):
            return ord(self._data[1])

        def get_payload(self):
            return self._data[2:-4]

    class FlashBlock(object):
        def __init__(self, addr, data):
            self._addr = addr
            self._data = data
            self._crc = crc32(self._data) & 0xFFFFFFFF
            self._validated = False

        def get_write_payload(self):
            return struct.pack('<I', self._addr) + self._data

        def get_crc_payload(self):
            return struct.pack('<II', self._addr, len(self._data))

        def validate(self, raw_response):
            addr, length, crc = struct.unpack('<III', raw_response)
            # check if this response completely includes this block
            if addr <= self._addr and (addr + length) >= self._addr + len(self._data):
                self._validated = (crc == self._crc)

        def is_validated(self):
            return self._validated

        def __repr__(self):
            return '<{}@{:#x}: addr={:#x}, length={}>' \
                .format(self.__class__.__name__, id(self), self._addr, len(self._data))

    def __init__(self, tty):
        self._serial = SerialPortWrapper(tty, None, ACCESSORY_CONSOLE_BAUD_RATE)
        self._hdlc_decoder = HDLCDecoder()
        self._server_version = 0

    def _send_frame(self, opcode, payload):
        data = struct.pack('<BB', 0, opcode)
        data += payload
        data += struct.pack('<I', crc32(data) & 0xFFFFFFFF)
        self._serial.write_fast(hdlc_encode_data(data))

    def _read_frame(self):
        start_time = time.time()
        while True:
            # process any queued frames
            for frame_data in iter(self._hdlc_decoder.get_frame, None):
                frame = self.Frame(frame_data)
                if frame.is_valid() and frame.flag_is_server():
                    self._server_version = frame.flag_version()
                    return frame
            if (time.time() - start_time) > READ_TIMEOUT:
                return None
            self._hdlc_decoder.write(self._serial.read(0.001))
    def _command_and_response(self, opcode, payload=''):
        retries = 5
        while True:
            self._send_frame(opcode, payload)
            frame = self._read_frame()
            if frame:
                if frame.get_opcode() != opcode:
                    raise AccessoryImagingError('ERROR: Got unexpected response ({:#x}, {})'
                                                .format(opcode, frame))
                break
            # NOTE: the original C-style `--retries` is a no-op in Python (double unary
            # minus), so the retry counter never decremented; decrement it explicitly.
            retries -= 1
            if retries == 0:
                raise AccessoryImagingError('ERROR: Watch did not respond to request ({:#x})'
                                            .format(opcode))
        return frame.get_payload()
    def _get_prompt(self):
        timeout = time.time() + 5
        while True:
            # we could be in stop mode, so send a few
            self._serial.write('\x03')
            self._serial.write('\x03')
            self._serial.write('\x03')
            read_data = self._serial.read()
            if read_data and read_data[-1] == '>':
                break
            time.sleep(0.5)
            if time.time() > timeout:
                raise AccessoryImagingError('ERROR: Timed-out connecting to the watch!')

    def start(self):
        self._serial.s.baudrate = ACCESSORY_CONSOLE_BAUD_RATE
        self._get_prompt()
        self._serial.write_fast('accessory imaging start\r\n')
        self._serial.read()
        self._serial.s.baudrate = ACCESSORY_IMAGING_BAUD_RATE
        if self._server_version >= 1:
            self.Frame.MAX_DATA_LENGTH = 2048

    def ping(self):
        payload = ''.join(chr(randint(0, 255)) for _ in range(10))
        if self._command_and_response(self.Frame.OPCODE_PING, payload) != payload:
            raise AccessoryImagingError('ERROR: Invalid ping payload in response!')

    def disconnect(self):
        self._command_and_response(self.Frame.OPCODE_DISCONNECT)
        self._serial.s.baudrate = ACCESSORY_CONSOLE_BAUD_RATE

    def reset(self):
        self._command_and_response(self.Frame.OPCODE_RESET)

    def flash_geometry(self, region):
        if region == self.Frame.REGION_PFS or region == self.Frame.REGION_COREDUMP:
            # These regions require >= v1
            if self._server_version < 1:
                raise AccessoryImagingError('ERROR: Server does not support this region')
        payload = struct.pack('<B', region)
        response = self._command_and_response(self.Frame.OPCODE_FLASH_GEOMETRY, payload)
        response_region, addr, length = struct.unpack('<BII', response)
        if response_region != region or length == 0:
            raise AccessoryImagingError('ERROR: Did not get region information ({:#x})'
                                        .format(region))
        return addr, length

    def flash_erase(self, addr, length):
        payload = struct.pack('<II', addr, length)
        while True:
            response = self._command_and_response(self.Frame.OPCODE_FLASH_ERASE, payload)
            response_addr, response_length, response_complete = struct.unpack('<IIB', response)
            if response_addr != addr or response_length != length:
                raise AccessoryImagingError('ERROR: Got invalid response (expected '
                                            '[{:#x},{:#x}], got [{:#x},{:#x}])'
                                            .format(addr, length, response_addr, response_length))
            elif response_complete != 0:
                break
            time.sleep(0.5)
        time.sleep(1)

    def flash_write(self, block):
        self._send_frame(self.Frame.OPCODE_FLASH_WRITE, block.get_write_payload())

    def flash_crc(self, blocks):
        payload = ''.join(x.get_crc_payload() for x in blocks)
        response = self._command_and_response(self.Frame.OPCODE_FLASH_CRC, payload)
        response_fmt = '<III'
        entry_size = struct.calcsize(response_fmt)
        num_entries = len(response) // entry_size
        if len(response) % entry_size != 0:
            raise AccessoryImagingError('ERROR: Invalid response length ({})'.format(len(response)))
        elif num_entries != len(blocks):
            raise AccessoryImagingError('ERROR: Invalid number of response entries ({})'
                                        .format(num_entries))
        responses = [response[i:i+entry_size] for i in xrange(0, len(response), entry_size)]
        assert len(responses) == len(blocks)
        return responses

    def flash_finalize(self, region):
        payload = struct.pack('<B', region)
        response = self._command_and_response(self.Frame.OPCODE_FLASH_FINALIZE, payload)
        response_region = struct.unpack('<B', response)[0]
        if response_region != region:
            raise AccessoryImagingError('ERROR: Did not get correct region ({:#x})'.format(region))

    def flash_read(self, region, progress):
        if progress:
            print('Connecting...')
        self.start()
        self.ping()

        # flash reading was added in v1
        if self._server_version < 1:
            raise AccessoryImagingError('ERROR: Server does not support reading from flash')

        addr, length = self.flash_geometry(region)

        if progress:
            print('Reading...')

        read_bytes = []
        last_percent = 0
        for offset in xrange(0, length, self.Frame.MAX_DATA_LENGTH):
            chunk_length = min(self.Frame.MAX_DATA_LENGTH, length - offset)
            data = struct.pack('<II', offset + addr, chunk_length)
            response = self._command_and_response(self.Frame.OPCODE_FLASH_READ, payload=data)
            # the first byte of the response is the flags (0th bit: repeat the single data byte)
            if bool(ord(response[0]) & self.Frame.FLASH_READ_FLAG_ALL_SAME):
                if len(response) != 2:
                    raise AccessoryImagingError('ERROR: Invalid flash read response')
                read_bytes.extend(response[1] * chunk_length)
            else:
                read_bytes.extend(response[1:])
            if progress:
                # don't spam the progress (only every 5%)
                percent = (offset * 100) // length
                if percent >= last_percent + 5:
                    print('{}% of the data read'.format(percent))
                    last_percent = percent

        self.flash_finalize(region)
        self.disconnect()

        if progress:
            print('Done!')

        return read_bytes

    def flash_image(self, image, region, progress):
        if progress:
            print('Connecting...')
        self.start()
        self.ping()

        addr, length = self.flash_geometry(region)
        if len(image) > length:
            raise AccessoryImagingError('ERROR: Image is too big! (size={}, region_length={})'
                                        .format(len(image), length))

        if progress:
            print('Erasing...')
        self.flash_erase(addr, length)

        total_blocks = []
        # the block size should be as big as possible, but we need to leave 4 bytes for the address
        block_size = self.Frame.MAX_DATA_LENGTH - 4
        for offset in xrange(0, len(image), block_size):
            total_blocks.append(self.FlashBlock(addr + offset, image[offset:offset+block_size]))

        if progress:
            print('Writing...')
        num_total = len(total_blocks)
        num_errors = 0
        pending_blocks = [x for x in total_blocks if not x.is_validated()]
        while len(pending_blocks) > 0:
            # We will split up the outstanding blocks into packets which should be as big as
            # possible, but are limited by the fact that the flash CRC response is 12 bytes per
            # block.
            packet_size = self.Frame.MAX_DATA_LENGTH // 12
            packets = []
            for i in xrange(0, len(pending_blocks), packet_size):
                packets += [pending_blocks[i:i+packet_size]]
            for packet in packets:
                # write each of the blocks
                for block in packet:
                    self.flash_write(block)

                # CRC each of the blocks
                crc_results = self.flash_crc(packet)
                for block, result in zip(packet, crc_results):
                    block.validate(result)

                # update the pending blocks
                pending_blocks = [x for x in total_blocks if not x.is_validated()]
                if progress:
                    percent = ((num_total - len(pending_blocks)) * 100) // num_total
                    num_errors += len([x for x in packet if not x.is_validated()])
                    print('{}% of blocks written ({} errors)'.format(percent, num_errors))

        self.flash_finalize(region)
        if region == self.Frame.REGION_FW_SCRATCH:
            self.reset()
        else:
            self.disconnect()

        if progress:
            print('Done!')
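The frame layout built by _send_frame() and checked by Frame.is_valid() can be sanity-checked without a serial port. A minimal sketch: appending the little-endian CRC-32 of a message to the message makes the CRC of the whole thing a fixed residue, which is why CRC_RESIDUE is defined as the CRC of four zero bytes.

    import struct
    from binascii import crc32

    CRC_RESIDUE = crc32(b'\x00\x00\x00\x00') & 0xFFFFFFFF  # 0x2144DF1C

    # Client PING frame: flags=0, opcode=0x01, arbitrary 10-byte payload, CRC-32 appended.
    frame = struct.pack('<BB', 0, 0x01) + b'\xAA' * 10
    frame += struct.pack('<I', crc32(frame) & 0xFFFFFFFF)
    assert crc32(frame) & 0xFFFFFFFF == CRC_RESIDUE
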

tools/activity/fft.py (new file, 438 lines)
@@ -0,0 +1,438 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

#################################################################################################
# Test FFT algorithm
#
# This is used to experiment with various step-tracking algorithms. It is a python implementation
# of an FFT as described in:
#     "Real-valued Fast Fourier Transform Algorithm", from IEEE Transactions on Acoustics,
#     Speech, and Signal Processing, Vol. ASSP-35, No. 6, June 1987
#
# The firmware uses this same algorithm, but implemented in C in the prv_fft_2radix_real()
# method of kraepelin_algorithm.c
##################################################################################################

import argparse
import os
import sys
import logging
import math


###########################################################################################
g_walk_10_steps = [
    [-362, -861, 69],
    [-309, -899, 45],
    [-266, -904, 21],
    [-242, -848, -134],
    [-272, -839, 34],
    [-207, -919, 14],
    [-244, -879, 93],
    [-238, -856, 91],
    [-185, -883, 37],
    [-217, -855, -156],
    [-200, -883, 25],
    [-154, -927, 42],
    [-179, -935, 71],
    [-184, -956, 32],
    [-129, -999, 99],
    [-195, -950, -112],
    [-222, -969, -164],
    [-351, -996, -190],
    [-277, -1218, -259],
    [-212, -1018, -250],
    [-209, -812, -142],
    [-182, -680, -200],
    [-257, -642, -169],
    [-269, -797, -289],
    [-142, -1107, -330],
    [-185, -909, -300],
    [-229, -706, -155],
    [-171, -750, -161],
    [-181, -811, -218],
    [-173, -845, -149],
    [-118, -887, -126],
    [-150, -871, -100],
    [-164, -908, -146],
    [-175, -958, -161],
    [-231, -952, -113],
    [-273, -1006, -205],
    [-321, -1047, -351],
    [-321, -1064, -300],
    [-262, -945, -210],
    [-298, -770, -124],
    [-338, -772, 95],
    [-325, -818, -179],
    [-329, -780, -153],
    [-280, -796, -151],
    [-230, -755, -100],
    [-234, -759, 44],
    [-248, -807, 90],
    [-217, -872, 79],
    [-204, -887, 74],
    [-189, -939, 78],
    [-220, -1014, -129],
    [-147, -1107, -129],
    [-274, -1013, -158],
    [-301, -1007, -258],
    [-351, -1131, -346],
    [-118, -1086, -355],
    [-290, -716, -213],
    [-288, -720, -290],
    [-235, -825, -344],
    [-179, -819, -243],
    [-228, -670, -185],
    [-125, -790, -145],
    [-145, -795, -207],
    [-152, -809, 76],
    [-98, -871, -115],
    [-89, -855, -111],
    [-116, -879, 84],
    [-161, -945, -172],
    [-147, -1017, -173],
    [-278, -1012, -146],
    [-268, -1049, -247],
    [-279, -1026, -260],
    [-286, -958, -187],
    [-288, -890, -167],
    [-359, -873, -168],
    [-324, -904, -147],
    [-263, -804, -134],
    [-214, -712, 37],
    [-189, -698, 29],
    [-183, -755, 74],
    [-182, -841, 98],
    [-115, -894, 73],
    [-149, -857, 57],
    [-93, -927, -68],
    [-145, -988, -120],
    [-112, -1095, -112],
    [-201, -1059, -146],
    [-278, -1104, -206],
    [-284, -1204, -213],
    [-214, -966, -254],
    [-272, -730, -140],
    [-233, -785, -252],
    [-259, -813, -272],
    [-156, -840, -205],
    [-163, -765, -110],
    [-165, -741, 97],
    [-164, -791, 86],
    [-99, -849, -69],
    [-99, -820, -81],
    [-94, -842, -37],
    [-142, -881, -109],
    [-153, -978, -155],
    [-212, -934, 71],
    [-341, -947, 99],
    [-406, -1039, -283],
    [-265, -1146, -206],
    [-296, -979, -163],
    [-345, -864, 98],
    [-216, -907, 38],
    [-242, -809, 47],
    [-154, -736, 52],
    [-137, -700, -101],
    [-184, -743, -136],
    [-191, -850, 86],
    [-206, -883, 85],
    [-194, -875, 48],
    [-148, -937, 46],
    [-193, -983, 31],
    [-176, -1062, 43],
    [-251, -1006, -114],
    [-284, -1036, -192],
    [-374, -1181, -248],
    [-167, -1177, -271],
    [-253, -794, -128],
    [-285, -651, -129],
    [-228, -757, -227],
    [-260, -843, -201],
    [-189, -899, -253],
    [-212, -800, -136],
    [-218, -728, -136],
    [-177, -761, -129],
    [-165, -806, -137],
    [-157, -839, -122],
    [-116, -899, -104],
    [-191, -874, 77],
    [-174, -911, 95],
    [-193, -971, -147],
    [-255, -961, -127],
    [-222, -1052, -124],
    [-333, -1021, -223],
    [-245, -1018, -215],
    [-269, -850, 91],
    [-318, -754, -120],
    [-335, -878, -199],
    [-322, -986, -224],
    [-192, -902, -179],
    [-177, -712, 86],
    [-196, -673, 88],
    [-178, -751, -101],
    [-182, -847, 70],
    [-147, -909, -131],
    [-170, -939, 43],
    [-224, -994, 60],
    [-189, -1051, 42],
    [-242, -968, -183],
    [-312, -978, -213],
    [-317, -1298, -334],
    [-184, -1131, -330],
    [-287, -754, -141],
    [-249, -773, -287],
    [-166, -842, -297],
    [-196, -742, -214],
    [-163, -729, -198],
    [-177, -757, -197],
    [-174, -830, -155],
    [-159, -860, -149],
    [-145, -856, 72],
    [-132, -849, 47],
    [-145, -839, 62],
    [-179, -843, 76],
    [-163, -941, -114],
    [-230, -963, -110],
]


##################################################################################################
def real_value_fft(x):
    """ Real value FFT as described in Appendix of:
    "Real-valued Fast Fourier Transform Algorithm", from IEEE Transactions on Acoustics, Speech,
    and Signal Processing, Vol. ASSP-35, No. 6, June 1987
    """

    # Make sure we have a power of 2 length input
    n = len(x)
    m = int(math.log(n, 2))
    if (math.pow(2, m) != n):
        raise RuntimeError("Length must be a power of 2")

    # The rest of the code assumes 1-based indexing (it comes from fortran)
    x = [0] + x

    # ---------------------------------------------------------------------------------
    # Digit reverse counter
    j = 1
    n1 = n - 1
    for i in range(1, n1 + 1):
        if (i < j):
            xt = x[j]
            x[j] = x[i]
            x[i] = xt
        k = n/2
        while (k < j):
            j = j - k
            k = k / 2
        j = j + k

    # ---------------------------------------------------------------------------------
    # Length 2 butterflies
    for i in range(1, n + 1, 2):
        xt = x[i]
        x[i] = xt + x[i+1]
        x[i+1] = xt - x[i+1]

    # ---------------------------------------------------------------------------------
    # Other butterflies
    n2 = 1
    for k in range(2, m + 1):
        n4 = n2
        n2 = 2 * n4
        n1 = 2 * n2
        e = 2 * math.pi / n1
        for i in range(1, n+1, n1):
            xt = x[i]
            x[i] = xt + x[i + n2]
            x[i + n2] = xt - x[i + n2]
            x[i + n4 + n2] = -x[i + n4 + n2]

            a = e
            for j in range(1, n4 - 1):
                i1 = i + j
                i2 = i - j + n2
                i3 = i + j + n2
                i4 = i - j + n1
                cc = math.cos(a)
                ss = math.sin(a)
                a = a + e
                t1 = x[i3] * cc + x[i4] * ss
                t2 = x[i3] * ss - x[i4] * cc
                x[i4] = x[i2] - t2
                x[i3] = -x[i2] - t2
                x[i2] = x[i1] - t1
                x[i1] = x[i1] + t1

    return x[1:]


###################################################################################################
def compute_magnitude(x):
    """ The real_value_fft() produces an array containing outputs in this order:
        [Re(0), Re(1), ..., Re(N/2), Im(N/2-1), ..., Im(1)]

    This method returns the magnitudes. The magnitude of term i is sqrt(Re(i)**2 + Im(i)**2)
    """
    result = []
    n = len(x)
    real_idx = 0
    im_idx = n - 1

    result.append(x[real_idx])
    real_idx += 1

    while real_idx <= n/2 - 1:
        mag = (x[real_idx]**2 + x[im_idx]**2) ** 0.5
        result.append(mag)
        real_idx += 1
        im_idx -= 1

    result.append(x[real_idx])
    return result


###################################################################################################
def apply_gausian(x, width=0.1):
    """ Multiply x by the gaussian function. Width is a fraction, like 0.1
    """
    result = []
    n = len(x)
    mid = float(n/2)
    denominator = n**2 * width

    for i in range(len(x)):
        print i-mid, (i-mid)**2, -1 * (i - mid)**2/denominator, \
            math.exp(-1 * (i - mid)**2/denominator)
        g = math.exp(-1 * (i - mid)**2/denominator)
        result.append(g * x[i])

    return result


###################################################################################################
def print_graph(x):
    min_value = min(x)
    max_value = max(x)

    extent = max(abs(min_value), abs(max_value))
    scale = 2 * extent
    min_value = -extent

    for i in range(len(x)):
        print "%4d: %10.3f: " % (i, x[i]),
        position = int((x[i] - min_value) * 80 / scale)
        if position < 40:
            print ' ' * position,
            print '*' * (40 - position)
        else:
            print ' ' * 40,
            print '*' * (position - 40)


###################################################################################################
if __name__ == '__main__':

    # Collect our command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('--debug', action='store_true', help="Turn on debug logging")
    args = parser.parse_args()

    level = logging.INFO
    if args.debug:
        level = logging.DEBUG
    logging.basicConfig(level=level)

    # -------------------------------------------------------------------------------------
    # Constant signal
    if 0:
        input_len = 128
        input = [1 for x in range(input_len)]
        print "\n############ INPUT ######################"
        print_graph(input)

        result = real_value_fft(input)

        print "\n############ RESULT ######################"
        print_graph(result)

    # -------------------------------------------------------------------------------------
    # N sine waves
    if 0:
        input_len = 128
        freq = 7
        input = [math.cos(float(x)/input_len * freq * 2 * math.pi) for x in range(input_len)]
        print "\n############ INPUT ######################"
        print_graph(input)

        print "\n############ GAUSIAN OF INPUT ############"
        # input = apply_gausian(input, 0.1)
        print_graph(input)

        result = real_value_fft(input)

        print "\n############ REAL, IMAG ######################"
        print_graph(result)

        print "\n############ MAGNITUDE ######################"
        mag = compute_magnitude(result)
        print_graph(mag)

    # -------------------------------------------------------------------------------------
    # Step data
    if 1:
        input_len = 128
        raw_input = g_walk_10_steps[0:input_len]

        x_data = [x for x, y, z in raw_input]
        x_mean = sum(x_data) / len(x_data)
        x_data = [x - x_mean for x in x_data]

        y_data = [y for x, y, z in raw_input]
        y_mean = sum(y_data) / len(y_data)
        y_data = [y - y_mean for y in y_data]

        z_data = [z for x, y, z in raw_input]
        z_mean = sum(z_data) / len(z_data)
        z_data = [z - z_mean for z in z_data]

        print "\n############ X ######################"
        print_graph(x_data)
        print "\n############ Y ######################"
        print_graph(y_data)
        print "\n############ Z ######################"
        print_graph(z_data)

        input = []
        for (x, y, z) in raw_input:
            mag = x**2 + y**2 + z**2
            mag = mag ** 0.5
            input.append(mag)

        mean_mag = sum(input) / len(input)
        input = [x - mean_mag for x in input]

        print "\n############ INPUT ######################"
        # input = apply_gausian(input)
        print_graph(input)

        result = real_value_fft(input)

        print "\n############ REAL, IMAG ######################"
        print_graph(result)

        print "\n############ MAGNITUDE ######################"
        mag = compute_magnitude(result)
        print_graph(mag)
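The output packing that compute_magnitude() documents, [Re(0), Re(1), ..., Re(N/2), Im(N/2-1), ..., Im(1)], can be reproduced independently with a direct DFT. A minimal sketch (arbitrary input values, N = 8):

    import cmath

    def dft(x):
        n = len(x)
        return [sum(x[t] * cmath.exp(-2j * cmath.pi * k * t / n) for t in range(n))
                for k in range(n)]

    x = [1.0, 2.0, 0.0, -1.0, 3.0, 0.5, -2.0, 1.5]
    spec = dft(x)
    n = len(x)
    # Pack as [Re(0)..Re(N/2), Im(N/2-1)..Im(1)]: N real inputs, N packed outputs.
    packed = ([spec[k].real for k in range(n // 2 + 1)] +
              [spec[k].imag for k in range(n // 2 - 1, 0, -1)])
    assert len(packed) == n
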

tools/activity/gen_data_logging_blob.py (new file, 78 lines)
@@ -0,0 +1,78 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

#################################################################################################
# Generate sample health activity blob
##################################################################################################
import argparse
import os
import sys
import logging
import math
import time
import struct


"""
typedef struct {
  uint8_t steps;        // # of steps in this minute
  uint8_t orientation;  // average orientation of the watch
  uint8_t vmc;          // vector magnitude count
} MinuteData;


typedef struct {
  uint16_t version;      // version, initial version is 1
  uint16_t len;          // length in bytes of blob, including this entire header
  uint32_t time_utc;     // UTC time of pebble
  uint32_t time_local;   // local time of pebble
  uint16_t num_samples;  // number of samples that follow
  MinuteData samples[];
} Header
"""

###################################################################################################
if __name__ == '__main__':

    # Collect our command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('--debug', action='store_true', help="Turn on debug logging")
    args = parser.parse_args()

    level = logging.INFO
    if args.debug:
        level = logging.DEBUG
    logging.basicConfig(level=level)

    sample_format = '<BBB'
    header_format = '<HHIIH'

    num_samples = 10
    blob = struct.pack(
        header_format,
        1,
        struct.calcsize(header_format) + num_samples * struct.calcsize(sample_format),
        int(time.time()),
        int(time.time()),
        num_samples)

    for i in range(num_samples):
        blob += struct.pack(sample_format,
                            30 + (i % 5),
                            4,
                            50 + (i % 4))

    # NOTE: opened in binary mode ('wb', not 'w') since the blob is raw bytes
    with open('health_blob.bin', 'wb') as out:
        out.write(blob)
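A quick way to verify the generated blob is to unpack it with the same format strings. A sketch, assuming the script above has just been run in the current directory:

    import struct

    with open('health_blob.bin', 'rb') as inf:
        blob = inf.read()

    header_format = '<HHIIH'
    version, blob_len, time_utc, time_local, num_samples = struct.unpack_from(header_format, blob)
    assert version == 1 and blob_len == len(blob) and num_samples == 10
    steps, orientation, vmc = struct.unpack_from('<BBB', blob, struct.calcsize(header_format))
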

tools/activity/parse_activity_data_logging_records.py (new file, 780 lines)
@@ -0,0 +1,780 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

""" Parse the data logging records generated by the activity service

This tool accepts binary data files obtained from the data logging service, or log files
captured from a Pebble support request. If given a log file, we look for log lines that start with
"RAW:". These contain the same content as the data logging records but base64 encoded.

Example log file entries from a Pebble support request
    2015-09-02 14:34:06:000GMT activity.c:706
      RAW: AQABAG2m5lUDcBkZjP///4v///+M////i////43///+L////jf///47///+L////kf///5P///8=
    2015-09-02 14:34:06:000GMT activity.c:710
      RAW: iP///4n///+P////kv///5H///+O////iv///4z///+J////jf///4r///+F////gv///3////8=


To extract the binary data logging records from the Pebble, use the "pebble data-logging" utility:
    pebble data-logging disable-sends    # so that records don't get sent to phone
    # ... Run the watch for a while to generate the data logging records
    pebble data-logging list             # List all data logging sessions
    pebble data-logging download --session_id=XXX    # where XXX is the session_id of the
                                                     # activity service (uuid == 0,
                                                     # tag = ALG_DLS_TAG)

This file will generate a .c file containing the sample data captured in the data logging records.
This .c file can in turn be incorporated into the algorithm unit tests.
"""
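As a sketch of the decoding step the docstring describes, extracting the record bytes from one "RAW: " log line looks like this (the payload here is truncated from the example above and re-padded, purely for illustration):

    import base64

    line = 'RAW: AQABAG2m5lU='
    prefix = 'RAW: '
    record_bytes = base64.b64decode(line[line.index(prefix) + len(prefix):])
    assert len(record_bytes) == 8
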
import argparse
import base64
import datetime
import gzip
import json
from jira.client import JIRA
import logging
import os
import re
import struct
import sys


# Create a ~/.triage JSON file and override/configure these keys:
SETTINGS = {
    # JIRA server url / username / password
    'server_url': 'https://pebbletechnology.atlassian.net',
    'user': None,
    'password': None,

    # Local path to download issue attachments to
    'download_path': '/tmp/',
}

SLEEP_DEFAULT_EXPECTED_TEXT = \
    """ //> TEST_VERSION 3
    //> TEST_TOTAL -1
    //> TEST_TOTAL_MIN -1
    //> TEST_TOTAL_MAX -1
    //> TEST_DEEP -1
    //> TEST_DEEP_MIN -1
    //> TEST_DEEP_MAX -1
    //> TEST_START_AT -1
    //> TEST_START_AT_MIN -1
    //> TEST_START_AT_MAX -1
    //> TEST_END_AT -1
    //> TEST_END_AT_MIN -1
    //> TEST_END_AT_MAX -1
    //> TEST_CUR_STATE_ELAPSED -1
    //> TEST_CUR_STATE_ELAPSED_MIN -1
    //> TEST_CUR_STATE_ELAPSED_MAX -1
    //> TEST_IN_SLEEP 0
    //> TEST_IN_SLEEP_MIN 0
    //> TEST_IN_SLEEP_MAX 0
    //> TEST_IN_DEEP_SLEEP 0
    //> TEST_IN_DEEP_SLEEP_MIN 0
    //> TEST_IN_DEEP_SLEEP_MAX 0
    //> TEST_WEIGHT 1.0
"""

#############################################################################################


#############################################################################################
class JIRASupport(object):
    DESYM_EXT = ".desym"

    def __init__(self, issue_id):
        self.issue_id = issue_id

    def issue_url(self, issue_id):
        return "%s/browse/%s" % (SETTINGS["server_url"], issue_id)

    def issue_download_path(self, issue_id):
        return os.path.join(SETTINGS["download_path"], issue_id)

    def download_path(self):
        return os.path.join(SETTINGS["download_path"])

    def load_user_settings(self):
        settings_path = '~/.triage'
        try:
            user_settings_file = open(os.path.expanduser(settings_path), 'rb')
            user_settings = json.load(user_settings_file)
        except IOError as e:
            if e.errno == 2:
                logging.error("""Please create %s with credentials: """
                              """'{ "user": "$USER", "password": "$PASSWORD" }'""",
                              settings_path)
            return
        SETTINGS.update(user_settings)

    def create_jira(self):
        self.load_user_settings()
        creds = (SETTINGS["user"], SETTINGS["password"])
        return JIRA(options={'server': SETTINGS["server_url"]},
                    basic_auth=creds)

    def download_attachments(self, issue_path, issue):
        attachments = issue.fields.attachment
        if not attachments:
            raise Exception("No attachments found.")
        local_paths = []
        for attachment in attachments:
            filename = attachment.raw['filename']
            local_path = os.path.join(issue_path, filename)
            local_paths.append(local_path)
            if os.path.exists(local_path):
                logging.debug("Skipping %s: file already exists.", filename)
                continue
            logging.info("Downloading %s..." % filename)
            open(local_path, 'wb').write(attachment.get())
        return set(local_paths)

    def unzip_android_logs(self, paths):
        ungz_paths = []
        gz_paths = filter(lambda path: re.search(r"\.gz$", path), paths)
        for gz_path in gz_paths:
            with gzip.open(gz_path, 'rb') as f_in:
                ungz_path = os.path.splitext(gz_path)[0]
                with open(ungz_path, 'wb') as f_out:
                    try:
                        f_out.writelines(f_in)
                    except IOError:
                        logging.error("Error writing unzipped android log")
                    finally:
                        f_out.close()
                ungz_paths.append(ungz_path)
        return set(ungz_paths)

    def get_watch_logs(self, local_attachment_paths):
        """ Goes through the list of attachment paths and returns a list containing only the
        watch logs.
        """
        watch_logs = []
        got_device_logs = False
        for path in sorted(local_attachment_paths):
            # iOS uses "watch_logs...", Android <2.1 uses "pebble.log", and Android >=2.1 (Holo)
            # uses "device-logs.log"
            if "watch_logs" not in path and "pebble.log" not in path and "device-logs" not in path:
                # Not a watch_logs... file
                continue
            if "device-logs" in path:
                got_device_logs = True
            elif "pebble.log" in path and got_device_logs:
                # Newer Android requests (Holo) have watch logs in "device-logs", not pebble.log
                continue
            if ".gz" in path:
                # Don't try to process pebble.log.gz
                continue
            if JIRASupport.DESYM_EXT in path:
                # This watch_logs file is a desymmed log file
                continue
            watch_logs.append(path)
        return watch_logs

    def all_logs(self):
        """ Download all watch log files and return a list of their filenames """
        jira = self.create_jira()
        issue = jira.issue(self.issue_id)

        # Create /tmp/ISSUE_ID to download attachments to:
        issue_path = self.issue_download_path(self.issue_id)
        if not os.path.exists(issue_path):
            logging.info("Creating %s" % issue_path)
            os.makedirs(issue_path)
        else:
            logging.info("Using %s" % issue_path)

        # Download attachments:
        try:
            local_attachment_paths = self.download_attachments(issue_path, issue)
        except:
            logging.info("No attachments to process")
            return []

        # Android attaches a .gz with all the logs:
        android_log_paths = self.unzip_android_logs(local_attachment_paths)
        local_attachment_paths = local_attachment_paths | android_log_paths
        return self.get_watch_logs(local_attachment_paths)


#############################################################################################
class ParseAccelSamplesFile(object):
    """ Parse raw accel data and produce a text output file from it. The raw binary
    format is documented as the ActivityRawSamplesRecord structure in activity.h

    For 'c' format, we want this type of format

      AccelRawData *activity_sample_42(int *len) {
        static AccelRawData samples[] = {
          { -362, -861, 69},
          { -309, -899, 45},
          ...
          { -163, -941, -114},
          { -230, -963, -110},
        };
        *len = ARRAY_LENGTH(samples);
        return samples;
      }
    """

    ACTIVITY_RAW_SAMPLE_FLAG_FIRST_RECORD = 0x01   # Set for first record of session
    ACTIVITY_RAW_SAMPLE_FLAG_LAST_RECORD = 0x02    # set for last record of session
    ACTIVITY_RAW_SAMPLES_VERSION_1 = 1
    ACTIVITY_RAW_SAMPLES_VERSION_2 = 2
    ACTIVITY_RAW_SAMPLES_MAX_ENTRIES = 25
    ACTIVITY_RAW_SAMPLES_PER_SECOND = 25

    #############################################################################################
    def __init__(self, image, bin_file, sample_prefix, format, exp, exp_min, exp_max):
        self.bin_file = bin_file
        self.format = format
        self.expected = (exp, exp_min, exp_max)
        self.sample_prefix = sample_prefix
        self.image = bytearray(image)
        self.offset = 0
        self.session_start_time_local = None
        self.session_id = None
        self.session_num_samples = 0

    #############################################################################################
    def _output_start_of_session(self):
        if self.format == 'c':
            # Starting a new sample
            print("\n\n")
            print("// ----------------------------------------------------------------")
            print("// Sample captured: %s local" %
                  (datetime.datetime.utcfromtimestamp(self.session_start_time_local)))
            print("AccelRawData *activity_sample_%s_%d(int *len) {" % (self.sample_prefix,
                                                                       self.session_id))
            print("  // The unit tests parse the //> TEST_.* lines below for test values")
            print("  //> TEST_NAME %s_%d" % (self.sample_prefix, self.session_id))
            print("  //> TEST_EXPECTED %d" % (self.expected[0]))
            print("  //> TEST_EXPECTED_MIN %d" % (self.expected[1]))
            print("  //> TEST_EXPECTED_MAX %d" % (self.expected[2]))
            print("  //> TEST_WEIGHT 1.0")
            print("  static AccelRawData samples[] = {")

        else:
            print("\n\n\n")
            print("###################################################################")
            print("##### Start of sample %d. Place this section in a new file ########" %
                  (self.session_id))
            print("###################################################################")

    #############################################################################################
    def _output_sample(self, x, y, z):
        if self.format == 'c':
            print("    { %d, %d, %d}," % (x, y, z))
        else:
            print("%d, %d, %d" % (x, y, z))

    #############################################################################################
    def _output_end_of_session(self):
        if self.session_id is not None:
            if self.format == 'c':
                print("  };")
                print("  *len = ARRAY_LENGTH(samples);")
                print("  return samples;")
                print("}\n")
            else:
                print("\n")

    #############################################################################################
    def _parse_binary_item(self):
        hdr_format = '<HHIBBBB'
        pack_size = struct.calcsize(hdr_format)
        version, session_id, session_start_time_local, flags, len, num_samples, num_entries = \
            struct.unpack_from(hdr_format, self.image, self.offset)
        self.offset += pack_size

        logging.debug("Got timestamp: %s, session %d, flags: 0x%x, num_entries: %d, "
                      "num_samples: %d"
                      % (datetime.datetime.utcfromtimestamp(session_start_time_local),
                         session_id, flags, num_entries, num_samples))

        if (version != self.ACTIVITY_RAW_SAMPLES_VERSION_1 and
                version != self.ACTIVITY_RAW_SAMPLES_VERSION_2):
            raise RuntimeError("Invalid record version: %d" % (version))

        if session_id != self.session_id:
            # New session about to start
            # End the previous one
            self._output_end_of_session()

            self.session_start_time_local = session_start_time_local
            self.session_id = session_id
            self.session_num_samples = 0

            if not (flags & self.ACTIVITY_RAW_SAMPLE_FLAG_FIRST_RECORD):
                print("WARNING: Invalid record detected. Start of new session (%d) without"
                      " the first record flag" % (session_id))

            self._output_start_of_session()

        # Extract each sample
        sample_format = '<I'
        pack_size = struct.calcsize(sample_format)
        num_samples_decoded = 0
        offset = self.offset
        samples_per_minute = 60 * self.ACTIVITY_RAW_SAMPLES_PER_SECOND
        for i in range(num_entries):
            encoded, = struct.unpack_from(sample_format, self.image, offset)
            offset += pack_size
            logging.debug("Got encoded sample %d: 0x%x" % (i, encoded))

            if ((self.session_num_samples % 25) == 0):
                if self.format == 'c':
                    print("    // %d seconds" % (self.session_num_samples / 25))

            # Decode it
            if version == self.ACTIVITY_RAW_SAMPLES_VERSION_1:
                run_size = encoded >> 27
                x = ((encoded >> 18) & 0x1FF) << 3
                if (x & 0x800):
                    x = -1 * (0x1000 - x)

                y = ((encoded >> 9) & 0x1FF) << 3
                if (y & 0x800):
                    y = -1 * (0x1000 - y)

                z = (encoded & 0x1FF) << 3
                if (z & 0x800):
                    z = -1 * (0x1000 - z)

            elif version == self.ACTIVITY_RAW_SAMPLES_VERSION_2:
                run_size = encoded >> 30
                x = ((encoded >> 20) & 0x3FF) << 3
                if (x & 0x1000):
                    x = -1 * (0x2000 - x)

                y = ((encoded >> 10) & 0x3FF) << 3
                if (y & 0x1000):
                    y = -1 * (0x2000 - y)

                z = (encoded & 0x3FF) << 3
                if (z & 0x1000):
                    z = -1 * (0x2000 - z)

            else:
                raise RuntimeError("Unimplemented version")

            for i in range(run_size):
                if (self.format == 'c' and (self.session_num_samples % samples_per_minute) == 0):
                    print("    // elapsed: %d minutes"
                          % (self.session_num_samples / samples_per_minute))
                self._output_sample(x, y, z)
                num_samples_decoded += 1
                self.session_num_samples += 1

        self.offset += self.ACTIVITY_RAW_SAMPLES_MAX_ENTRIES * pack_size

        # Make sure we got the expected # of samples
        if num_samples != num_samples_decoded:
            raise RuntimeError("Decoding error. Expected to find %d samples, but found %d"
                               % (num_samples, num_samples_decoded))

    #############################################################################################
    def parse_log_file(self):
        # Occasionally, the log file will start with the 2nd line of a 2 line accel sample,
        # so if we can't parse from there, we try from the next line down
        second_line_offset = None

        if not self.bin_file:
            # Convert text log file to binary format
            bin_data = bytearray()
            lines = self.image.split('\n')
            prefix = "RAW: "
            line_num = 0
            for line in lines:
                if prefix in line:
                    base64_str = line[line.index(prefix) + len(prefix):]
                    content = base64.b64decode(base64_str)
                    bin_data += content
                    if line_num == 0:
                        second_line_offset = len(bin_data)
                    line_num += 1

            self.image = bin_data

        # Parse the binary blob
        image_len = len(self.image)
        first_line = True
        while self.offset < image_len:
            try:
                self._parse_binary_item()
            except:
                if first_line:
                    self.offset = 0
                    self.image = self.image[second_line_offset:]
                    image_len = len(self.image)
                else:
                    raise
            first_line = False

        self._output_end_of_session()
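The 32-bit sample packing that _parse_binary_item() decodes (for version 2: a 2-bit run length, then 10-bit two's-complement x, y and z fields, each in units of 8) round-trips cleanly. A minimal sketch with a hypothetical encoder mirroring the decoder above:

    def encode_v2(run_size, x, y, z):
        # Hypothetical inverse of the version-2 decode branch above;
        # x, y, z must be multiples of 8 in [-4096, 4088].
        def field(v):
            return (v >> 3) & 0x3FF
        return (run_size << 30) | (field(x) << 20) | (field(y) << 10) | field(z)

    def decode_v2(encoded):
        # Mirrors the version-2 branch of ParseAccelSamplesFile._parse_binary_item()
        def signed(f):
            v = f << 3
            return v - 0x2000 if v & 0x1000 else v
        return (encoded >> 30,
                signed((encoded >> 20) & 0x3FF),
                signed((encoded >> 10) & 0x3FF),
                signed(encoded & 0x3FF))

    assert decode_v2(encode_v2(3, -360, -864, 72)) == (3, -360, -864, 72)
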
#############################################################################################
|
||||
class ParseMinuteStatsFile(object):
|
||||
|
||||
|
||||
#############################################################################################
|
||||
def __init__(self, image, bin_file, sample_prefix, format, start_idx):
|
||||
self.bin_file = bin_file
|
||||
self.sample_prefix = sample_prefix
|
||||
self.format = format
|
||||
self.image = bytearray(image)
|
||||
self.offset = 0
|
||||
self.session_minute_idx = 0
|
||||
self.global_minute_idx = 0
|
||||
self.session_start_time_utc = None
|
||||
self.session_elapsed_time = 0
|
||||
self.start_idx = start_idx
|
||||
|
||||
#############################################################################################
|
||||
def _finish_session(self):
|
||||
if self.session_start_time_utc is not None:
|
||||
print(" };")
|
||||
print(" *len = ARRAY_LENGTH(samples);")
|
||||
print(" return samples;")
|
||||
print("}\n")
|
||||
|
||||
self.session_start_time_utc = None
|
||||
|
||||
#############################################################################################
|
||||
def _parse_version_2_binary_item(self):
|
||||
""" For 'c' format, we want this type of format
|
||||
|
||||
AlgDlsMinuteData *activity_sample_42(int *len) {
|
||||
// list of: {steps, orientation, vmc, light}
|
||||
static AlgDlsMinuteData samples[] = {
|
||||
{2, 3, 4},
|
||||
{5, 6, 7},
|
||||
...
|
||||
{8, 9, 10},
|
||||
{11, 12, 13},
|
||||
};
|
||||
*len = ARRAY_LENGTH(samples);
|
||||
return samples;
|
||||
}
|
||||
"""
|
||||
version_pack_size = 2
|
||||
hdr_format = '<HIIH'
|
||||
hdr_pack_size = struct.calcsize(hdr_format)
|
||||
blob_len, time_utc, time_local, num_samples = \
|
||||
struct.unpack_from(hdr_format, self.image, self.offset)
|
||||
self.offset += hdr_pack_size
|
||||
|
||||
logging.debug("Got blob: local time: %s, utc time: %s, num_samples: %d" %
|
||||
(datetime.datetime.utcfromtimestamp(time_local),
|
||||
datetime.datetime.utcfromtimestamp(time_utc), num_samples))
|
||||
|
||||
# See if this is a continuation of a previous session, or a new one
|
||||
if self.session_start_time_utc is not None \
|
||||
and abs(self.session_start_time_utc + self.session_elapsed_time - time_utc) > 60:
|
||||
self._finish_session()
|
||||
|
||||
if self.global_minute_idx >= self.start_idx:
|
||||
printing_on = True
|
||||
else:
|
||||
printing_on = False
|
||||
|
||||
timestamp = datetime.datetime.utcfromtimestamp(time_local)
|
||||
if self.session_start_time_utc is None:
|
||||
if self.format == 'c':
|
||||
self.session_minute_idx = 0
|
||||
# Starting a new sample
|
||||
print("\n\n")
|
||||
print("// ----------------------------------------------------------------")
|
||||
print("// Sample captured at: %s local, %s GMT" %
|
||||
(timestamp, datetime.datetime.utcfromtimestamp(time_utc)))
|
||||
print("AlgDlsMinuteData *activity_sample_%s(int *len) {"
|
||||
% (timestamp.strftime('%Y_%m_%d_%H_%M_%S')))
|
||||
print(" // The unit tests parse the //> TEST_.* lines below for test values")
|
||||
print(" //> TEST_NAME %s" % (self.sample_prefix))
|
||||
print(SLEEP_DEFAULT_EXPECTED_TEXT)
|
||||
print(" // list of: {steps, orientation, vmc, ligh}")
|
||||
print(" static AlgDlsMinuteData samples[] = {")
|
||||
|
||||
else:
|
||||
raise RuntimeError("Only 'c' format is supported for minute stats")
|
||||
else:
|
||||
if printing_on:
|
||||
print(" // %d: Local time: %s" % (self.session_minute_idx,
|
||||
timestamp.strftime('%Y-%m-%d %I:%M:%S %p')))
|
||||
# Save the params from the header
|
||||
self.session_start_time_utc = time_utc
|
||||
self.session_elapsed_time = num_samples * 60
|
||||
self.session_minute_idx += num_samples
|
||||
self.global_minute_idx += num_samples
|
||||
|
||||
# Extract each sample
|
||||
sample_format = '<BBHB'
|
||||
sample_pack_size = struct.calcsize(sample_format)
|
||||
|
||||
expected_len = version_pack_size + hdr_pack_size + num_samples * sample_pack_size
|
||||
if blob_len != expected_len:
|
||||
raise RuntimeError("Invalid len in header (%d). Expected %d" % (blob_len, expected_len))
|
||||
|
||||
for i in range(num_samples):
|
||||
steps, orient, vmc, light = struct.unpack_from(sample_format, self.image, self.offset)
|
||||
self.offset += sample_pack_size
|
||||
logging.debug("Got sample %d: %d, %d, %d, %d" % (i, steps, orient, vmc, light))
|
||||
if printing_on:
|
||||
print(" { %d, 0x%x, %d, 0x%x}," % (steps, orient, vmc, light))
|
||||
|
||||
if not printing_on:
|
||||
self.session_minute_idx = 0
|
||||
|
||||
#############################################################################################
|
||||
def _parse_version_3_to_5_binary_item(self, version):
|
||||
""" For 'c' format, we want this type of format
|
||||
|
||||
AlgDlsMinuteData *activity_sample_42(int *len) {
|
||||
// list of: {steps, orientation, vmc, light}
|
||||
static AlgDlsMinuteData samples[] = {
|
||||
{2, 3, 4, 5},
|
||||
{5, 6, 7, 8},
|
||||
...
|
||||
{8, 9, 10, 11},
|
||||
{11, 12, 13, 14},
|
||||
};
|
||||
*len = ARRAY_LENGTH(samples);
|
||||
return samples;
|
||||
}
|
||||
"""
|
||||
# NOTE: the version was already pulled out
|
||||
hdr_format = '<Ib'
|
||||
hdr_pack_size = struct.calcsize(hdr_format)
|
||||
time_utc, time_local_offset_15_min = struct.unpack_from(hdr_format, self.image, self.offset)
|
||||
self.offset += hdr_pack_size
|
||||
time_local = time_utc + (time_local_offset_15_min * 15 * 60)
|
||||
|
||||
# Structure of each minute sample
|
||||
if version <= 4:
|
||||
# steps, orientation, vmc. light
|
||||
sample_format = '<BBHB'
|
||||
else:
|
||||
# steps, orientation, vmc, light, flags (plugged_in)
|
||||
sample_format = '<BBHBB'
|
||||
sample_pack_size = struct.calcsize(sample_format)
|
||||
num_samples = 15
|
||||
|
||||
# Version 4 and 5 have sample_size and num_samples
|
||||
if version >= 4:
|
||||
hdr_aux_format = '<bb'
|
||||
sample_size, num_samples = struct.unpack_from(hdr_aux_format, self.image, self.offset)
|
||||
self.offset += struct.calcsize(hdr_aux_format)
|
||||
if (num_samples != 15):
|
||||
raise RuntimeError("Invalid num_samples value of %d" % (num_samples))
|
||||
if (sample_size != sample_pack_size):
|
||||
raise RuntimeError("Invalid sample size of %d" % (sampleSize))
|
||||
|
||||
# Print header info
|
||||
logging.debug("Got blob: local time: %s, utc time: %s, num_samples: %d" %
|
||||
(datetime.datetime.utcfromtimestamp(time_local),
|
||||
datetime.datetime.utcfromtimestamp(time_utc), num_samples))
|
||||
|
||||
# See if this is a continuation of a previous session, or a new one
|
||||
if (self.session_start_time_utc is not None and
|
||||
abs(self.session_start_time_utc + self.session_elapsed_time - time_utc) > 60):
|
||||
self._finish_session()
|
||||
|
||||
if self.global_minute_idx >= self.start_idx:
|
||||
printing_on = True
|
||||
else:
|
||||
printing_on = False
|
||||
|
||||
timestamp = datetime.datetime.utcfromtimestamp(time_local)
|
||||
if self.session_start_time_utc is None:
|
||||
if self.format == 'c':
|
||||
self.session_minute_idx = 0
|
||||
# Starting a new sample
|
||||
print("\n\n")
|
||||
print("// ----------------------------------------------------------------")
|
||||
print("// Sample captured at: %s local, %s GMT" %
|
||||
(timestamp, datetime.datetime.utcfromtimestamp(time_utc)))
|
||||
print("AlgDlsMinuteData *activity_sample_%s(int *len) {"
|
||||
% (timestamp.strftime('%Y_%m_%d_%H_%M_%S')))
|
||||
print(" // The unit tests parse the //> TEST_.* lines below for test values")
|
||||
print(" //> TEST_NAME %s" % (self.sample_prefix))
|
||||
print(SLEEP_DEFAULT_EXPECTED_TEXT)
|
||||
print(" // list of: {steps, orientation, vmc, light, plugged_in}")
|
||||
print(" static AlgDlsMinuteData samples[] = {")
|
||||
|
||||
else:
|
||||
raise RuntimeError("Only 'c' format is supported for minute stats")
|
||||
else:
|
||||
if printing_on:
|
||||
print(" // %d: Local time: %s" % (self.session_minute_idx,
|
||||
timestamp.strftime('%Y-%m-%d %I:%M:%S %p')))
|
||||
|
||||
# Save the params from the header
|
||||
self.session_start_time_utc = time_utc
|
||||
self.session_elapsed_time = num_samples * 60
|
||||
self.session_minute_idx += num_samples
|
||||
self.global_minute_idx += num_samples
|
||||
|
||||
# Extract each sample
|
||||
|
||||
for i in range(num_samples):
|
||||
if version <= 4:
|
||||
steps, orient, vmc, light = struct.unpack_from(sample_format, self.image,
|
||||
self.offset)
|
||||
flags = 0
|
||||
else:
|
||||
steps, orient, vmc, light, flags = struct.unpack_from(sample_format, self.image,
|
||||
self.offset)
|
||||
self.offset += sample_pack_size
|
||||
logging.debug("Got sample %d: %d, %d, %d, %d, 0x%x" % (i, steps, orient, vmc, light,
|
||||
flags))
|
||||
plugged_in = flags & 0x01
|
||||
if printing_on:
|
||||
print(" { %d, 0x%x, %d, %d, %d}," % (steps, orient, vmc, light, plugged_in))
|
||||
|
||||
if not printing_on:
|
||||
self.session_minute_idx = 0
|
||||
|
||||
#############################################################################################
|
||||
def _parse_binary_item(self):
|
||||
""" For 'c' format, we want this type of format
|
||||
|
||||
AlgDlsMinuteData *activity_sample_42(int *len) {
|
||||
static AlgDlsMinuteData samples[] = {
|
||||
{2, 3, 4},
|
||||
{5, 6, 7},
|
||||
...
|
||||
{8, 9, 10},
|
||||
{11, 12, 13},
|
||||
};
|
||||
*len = ARRAY_LENGTH(samples);
|
||||
return samples;
|
||||
}
|
||||
"""
|
||||
# Get the version first
|
||||
version_format = '<H'
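# '<H': little-endian uint16 record version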
|
||||
version_pack_size = struct.calcsize(version_format)
|
||||
version, = \
|
||||
struct.unpack_from(version_format, self.image, self.offset)
|
||||
self.offset += version_pack_size
|
||||
|
||||
# This is version 2 of AlgDlsRecordHdr
|
||||
if version == 2:
|
||||
self._parse_version_2_binary_item()
|
||||
|
||||
# This is version 3, 4, or 5 of AlgDlsRecordHdr
|
||||
elif (version >= 3) and (version <= 5):
|
||||
self._parse_version_3_to_5_binary_item(version)
|
||||
|
||||
else:
|
||||
raise RuntimeError("This blob has version %d, which is unrecognized" % (version))
|
||||
|
||||
#############################################################################################
|
||||
def parse_log_file(self):
|
||||
if not self.bin_file:
|
||||
# Convert text log file to binary format
|
||||
bin_data = bytearray()
|
||||
lines = self.image.split('\n')
|
||||
prefix = "SLP: "
|
||||
for line in lines:
|
||||
if prefix in line:
|
||||
logging.debug("Converting %s to binary" % (line))
|
||||
base64_str = line[line.index(prefix) + len(prefix):]
|
||||
content = base64.b64decode(base64_str)
|
||||
bin_data += content
|
||||
|
||||
self.image = bin_data
|
||||
|
||||
# Parse the binary blob
|
||||
image_len = len(self.image)
|
||||
while self.offset < image_len:
|
||||
self._parse_binary_item()
|
||||
|
||||
self._finish_session()
|
||||
|
||||
|
||||
##################################################################################################
|
||||
if __name__ == '__main__':
|
||||
# Collect our command line arguments
|
||||
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
|
||||
parser.add_argument('infile', help="The input file. This can be a .bin file obtained using the "
|
||||
"'pebble data-logging download' command. a .txt log file from a"
|
||||
"support request, or a JIRA ticket number like 'PBL-25667' (no extension)")
|
||||
parser.add_argument('--input', choices=['minute_stats', 'accel_samples'],
|
||||
default='minute_stats',
|
||||
help="The type of data logging records being fed in."
|
||||
" 'minute_stats' are generated on the watch with a data logging tag of 81, "
|
||||
" 'accel_samples' with a tag of 82.")
|
||||
parser.add_argument('--output', choices=['c', 'pebble-tool'], default='c',
|
||||
help="The format of the output")
|
||||
parser.add_argument('--start', type=int, default=0,
|
||||
help="Start dumping records from this offset. This is helpful if you "
|
||||
" get a huge log file and only want the end.")
|
||||
parser.add_argument('--name_prefix', default="walk",
|
||||
help="Each sample's name in the generated output will begin with this "
|
||||
"prefix")
|
||||
parser.add_argument('--expected', type=int, default=-1,
|
||||
help="When generating accel sample files, imbed this into the meta-data as"
|
||||
"the expected number of steps")
|
||||
parser.add_argument('--expected_min', type=int, default=-1,
|
||||
help="When generating accel sample files, imbed this into the meta-data as"
|
||||
"the acceptable minimum number of steps")
|
||||
parser.add_argument('--expected_max', type=int, default=-1,
|
||||
help="When generating accel sample files, imbed this into the meta-data as"
|
||||
"the acceptable maximum number of steps")
|
||||
|
||||
parser.add_argument('--debug', action='store_true', help="Turn on debug logging")
|
||||
args = parser.parse_args()
|
||||
|
||||
level = logging.INFO
|
||||
if args.debug:
|
||||
level = logging.DEBUG
|
||||
logging.basicConfig(level=level)
|
||||
from_jira = False
|
||||
|
||||
if args.infile.endswith('.bin'):
|
||||
input_is_bin = True
|
||||
infiles = [args.infile]
|
||||
elif args.infile.endswith('.txt') or args.infile.endswith('.log'):
|
||||
input_is_bin = False
|
||||
infiles = [args.infile]
|
||||
elif '.' in args.infile:
|
||||
raise RuntimeError("Invalid input file type. Must end with .txt or .bin")
|
||||
else:
|
||||
input_is_bin = False
|
||||
jira = JIRASupport(args.infile)
|
||||
infiles = jira.all_logs()
|
||||
from_jira = True
|
||||
|
||||
# Figure out a good prefix to use for each sample name
|
||||
if from_jira:
|
||||
sample_prefix = args.infile.lower()
|
||||
else:
|
||||
sample_prefix = os.path.splitext(os.path.basename(args.infile))[0].lower()
|
||||
|
||||
for file in infiles:
|
||||
if args.input == 'accel_samples':
|
||||
sample_prefix = "%s_%s" % (args.name_prefix, sample_prefix.replace('-', '_'))
|
||||
parser = ParseAccelSamplesFile(image=open(file, 'r').read(), bin_file=input_is_bin,
|
||||
sample_prefix=sample_prefix, format=args.output,
|
||||
exp=args.expected, exp_min=args.expected_min,
|
||||
exp_max=args.expected_max)
|
||||
else:
|
||||
sample_prefix = "%s" % (sample_prefix.replace('-', '_'))
|
||||
parser = ParseMinuteStatsFile(image=open(file, 'r').read(), bin_file=input_is_bin,
|
||||
sample_prefix=sample_prefix, format=args.output,
|
||||
start_idx=args.start)
|
||||
|
||||
parser.parse_log_file()
|
43
tools/activity/plot_magnitudes.py
Normal file
|
@ -0,0 +1,43 @@
|
|||
# Copyright 2024 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
""" Plot the stats contained in the .csv file generated by the test_kraepelin_algorithm
|
||||
unit test (when STATS_FILE_NAME is defined).
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import csv
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import struct
|
||||
import sys
|
||||
import matplotlib.pyplot as pyplot
|
||||
|
||||
|
||||
##################################################################################################
|
||||
if __name__ == '__main__':
|
||||
values = [73, 75, 39, 41, 90, 128, 105, 156, 212, 23, 92, 78, 57, 46, 44, 52, 31, 26, 23, 13,
|
||||
22, 11, 20, 25, 12, 13, 10, 25, 17, 23, 16, 15, 12, 20, 12, 21, 40, 38, 20, 21, 21,
|
||||
41, 52, 35, 33, 23, 26, 21, 32, 23, 20, 16, 24, 23, 40, 46, 89, 152, 88, 33, 53, 11,
|
||||
36, 45]
|
||||
|
||||
x_axis = range(64)
|
||||
x_labels = [str(x) for x in x_axis]
|
||||
|
||||
pyplot.bar(x_axis, values, align='center')
|
||||
pyplot.xticks(x_axis, x_labels, rotation='vertical')
|
||||
pyplot.xlim(x_axis[0] - .5, x_axis[-1] + .5)
|
||||
pyplot.show()
|
78
tools/activity/plot_stats.py
Normal file
|
@ -0,0 +1,78 @@
|
|||
# Copyright 2024 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
""" Plot the stats contained in the .csv file generated by the test_kraepelin_algorithm
|
||||
unit test (when STATS_FILE_NAME is defined).
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import csv
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import struct
|
||||
import sys
|
||||
import matplotlib.pyplot as pyplot
|
||||
|
||||
|
||||
##################################################################################################
|
||||
def plot_stats(args, rows, stat_name):
|
||||
# Get the vmc and score for each of the stepping epochs
|
||||
stepping_vmcs = []
|
||||
stepping_scores = []
|
||||
non_stepping_vmcs = []
|
||||
non_stepping_scores = []
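# epoch_type (from the csv): 0 = non-stepping epoch, 2 = stepping epoch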
|
||||
for row in rows:
|
||||
if int(row['epoch_type']) == 0:
|
||||
non_stepping_vmcs.append(int(row['vmc']))
|
||||
non_stepping_scores.append(int(row[stat_name]))
|
||||
elif int(row['epoch_type']) == 2:
|
||||
stepping_vmcs.append(int(row['vmc']))
|
||||
stepping_scores.append(int(row[stat_name]))
|
||||
|
||||
pyplot.plot(stepping_vmcs, stepping_scores, 'go',
|
||||
non_stepping_vmcs, non_stepping_scores, 'ro')
|
||||
pyplot.show()
|
||||
|
||||
|
||||
##################################################################################################
|
||||
if __name__ == '__main__':
|
||||
# Collect our command line arguments
|
||||
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
|
||||
parser.add_argument('infile', help="The input csv file")
|
||||
parser.add_argument('--plot', choices=['score_0', 'score_lf', 'total'],
|
||||
default='score_0',
|
||||
help="Which metric to plot against vmc")
|
||||
parser.add_argument('--debug', action='store_true', help="Turn on debug logging")
|
||||
args = parser.parse_args()
|
||||
|
||||
level = logging.INFO
|
||||
if args.debug:
|
||||
level = logging.DEBUG
|
||||
logging.basicConfig(level=level)
|
||||
|
||||
# Read in the csv file
|
||||
col_names = None
|
||||
rows = []
|
||||
with open(args.infile, 'rb') as csvfile:
|
||||
reader = csv.reader(csvfile)
|
||||
for row in reader:
|
||||
if reader.line_num == 1:
|
||||
col_names = [x.strip() for x in row]
|
||||
else:
|
||||
rows.append(dict(zip(col_names, row)))
|
||||
|
||||
# Plot now
|
||||
plot_stats(args, rows, args.plot)
|
206
tools/analyze_fw_static_memory_usage.py
Normal file
|
@ -0,0 +1,206 @@
|
|||
# Copyright 2024 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
sys.path.append(os.path.dirname(__file__))
|
||||
import analyze_static_memory_usage
|
||||
from binutils import nm_generator, analyze_elf
|
||||
|
||||
def cleanup_path(f):
|
||||
f = os.path.normpath(f)
|
||||
|
||||
# Check for .c.3.o style suffixes and strip them back to just .c
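# e.g. 'applib/accel_service.c.3.o' -> 'applib/accel_service.c'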
|
||||
if len(f) > 6 and f[-6:-3] == '.c.' and f[-2:] == '.o':
|
||||
f = f[:-4]
|
||||
|
||||
if f.startswith('src/'):
|
||||
f = f[4:]
|
||||
|
||||
newlib_index = f.rfind('/newlib/libc')
|
||||
if newlib_index != -1:
|
||||
f = f[newlib_index+1:]
|
||||
|
||||
libgcc_index = f.rfind('/libgcc/')
|
||||
if libgcc_index != -1:
|
||||
f = f[libgcc_index+1:]
|
||||
|
||||
libc_index = f.rfind('/arm-none-eabi/lib/')
|
||||
if libc_index != -1:
|
||||
f = f[libc_index+1:]
|
||||
|
||||
tintin_index = f.rfind('/tintin/src/')
|
||||
if tintin_index != -1:
|
||||
f = f[tintin_index + len('/tintin/src/'):]
|
||||
|
||||
tintin_build_index = f.rfind('/tintin/build/src/')
|
||||
if tintin_build_index != -1:
|
||||
f = f[tintin_build_index + len('/tintin/'):]
|
||||
|
||||
return f
|
||||
|
||||
analyze_static_memory_usage.cleanup_path_func = cleanup_path
|
||||
|
||||
def analyze_map(map_file, sections):
|
||||
# Now that we have a list of all the symbols listed in the nm output, we need to go back
|
||||
# and dig through the map file to find filenames for the symbols with an "Unknown" filename
|
||||
|
||||
# We only care about the .text section here
|
||||
if not 't' in sections:
|
||||
return
|
||||
|
||||
text_section = sections['t']
|
||||
|
||||
def line_generator(map_file):
|
||||
with open(map_file, 'r') as f:
|
||||
for line in f:
|
||||
yield line
|
||||
|
||||
lines = line_generator(map_file)
|
||||
for line in lines:
|
||||
if line.startswith('Linker script and memory map'):
|
||||
break
|
||||
|
||||
for line in lines:
|
||||
if line.startswith('.text'):
|
||||
break
|
||||
|
||||
# We're looking for groups of lines like the following...
|
||||
#
|
||||
# .text.do_tap_handle
|
||||
# 0x0000000008010e08 0x28 src/fw/applib/accel_service.c.3.o
|
||||
|
||||
symbol_pattern = re.compile(r""" \.?[^\.\s]*\.(\S+)""")
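# e.g. " .text.do_tap_handle" -> captures "do_tap_handle"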
|
||||
for line in lines:
|
||||
match = symbol_pattern.match(line.rstrip())
|
||||
if match is None:
|
||||
continue
|
||||
|
||||
symbol = match.group(1)
|
||||
|
||||
line = lines.next()
|
||||
|
||||
cols = line.split()
|
||||
if len(cols) < 3:
|
||||
continue
|
||||
|
||||
filename = cols[2]
|
||||
|
||||
symbol_with_unknown_file = text_section.remove_unknown_entry(symbol)
|
||||
if symbol_with_unknown_file is None:
|
||||
continue
|
||||
text_section.add_entry(symbol, filename, symbol_with_unknown_file.size)
|
||||
|
||||
def analyze_libs(root_directory, sections, use_fast):
|
||||
def analyze_lib(lib_filename):
|
||||
for (_, section, symbol_name, filename, line, size) in nm_generator(lib_filename, use_fast):
|
||||
if not section in sections:
|
||||
continue
|
||||
|
||||
section_info = sections[section]
|
||||
|
||||
symbol_with_unknown_file = section_info.remove_unknown_entry(symbol_name)
|
||||
if symbol_with_unknown_file is None:
|
||||
continue
|
||||
|
||||
section_info.add_entry(symbol_name, lib_filename, size)
|
||||
|
||||
for (dirpath, dirnames, filenames) in os.walk(root_directory):
|
||||
for f in filenames:
|
||||
if f.endswith('.a'):
|
||||
analyze_lib(os.path.join(dirpath, f))
|
||||
|
||||
def print_groups(text_section, verbose):
|
||||
mappings = [
|
||||
('fw/vendor/FreeRTOS/', 'FreeRTOS'),
|
||||
('core/vendor/STM32F2xx_StdPeriph_Lib_V1.0.0', 'STM32'),
|
||||
('newlib/', 'newlib'),
|
||||
('libgcc/', 'libgcc'),
|
||||
('arm-none-eabi/lib/', 'libc'),
|
||||
('fw/applib/', 'FW Applib'),
|
||||
('fw/apps/', 'FW Apps'),
|
||||
('fw/comm/ble/', 'FW Comm LE'),
|
||||
('fw/comm/', 'FW Comm'),
|
||||
('fw/kernel/services/', 'FW Kernel Services'),
|
||||
('fw/', 'FW Other'),
|
||||
('core/', 'FW Other'),
|
||||
('build/src/fw', 'FW Other')
|
||||
]
|
||||
|
||||
class Group(object):
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
self.total_size = 0
|
||||
self.files = []
|
||||
|
||||
def add_file(self, f):
|
||||
self.total_size += f.size
|
||||
self.files.append(f)
|
||||
|
||||
group_sizes = {}
|
||||
|
||||
for f in text_section.get_files():
|
||||
found = False
|
||||
|
||||
for prefix, value in mappings:
|
||||
if f.filename.startswith(prefix):
|
||||
if not value in group_sizes:
|
||||
group_sizes[value] = Group(f.filename)
|
||||
group_sizes[value].add_file(f)
|
||||
found = True
|
||||
break
|
||||
|
||||
if not found:
|
||||
if not 'Unknown' in group_sizes:
|
||||
group_sizes['Unknown'] = Group(f.filename)
|
||||
group_sizes['Unknown'].add_file(f)
|
||||
|
||||
sorted_items = sorted(group_sizes.iteritems(), key=lambda x: -x[1].total_size)
|
||||
for group_name, group in sorted_items:
|
||||
print "%-20s %u" % (group_name, group.total_size)
|
||||
if verbose:
|
||||
sorted_files = sorted(group.files, key=lambda x: -x.size)
|
||||
for f in sorted_files:
|
||||
print " %6u %-20s" % (f.size, f.filename)
|
||||
|
||||
|
||||
|
||||
if (__name__ == '__main__'):
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('--text_groups', action='store_true')
|
||||
parser.add_argument('--verbose', action='store_true')
|
||||
parser.add_argument('--summary', action='store_true')
|
||||
parser.add_argument('--sections', default='bdt')
|
||||
parser.add_argument('--fast', action='store_true')
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.text_groups:
|
||||
args.sections = 't'
|
||||
|
||||
tintin_dir = os.path.join(os.path.dirname(__file__), '..')
|
||||
elf_path = os.path.join(tintin_dir, 'build', 'src', 'fw', 'tintin_fw.elf')
|
||||
|
||||
sections = analyze_elf(elf_path, args.sections, args.fast)
|
||||
|
||||
analyze_map(os.path.join(tintin_dir, 'build', 'tintin_fw.map'), sections)
|
||||
|
||||
if args.text_groups:
|
||||
print_groups(sections['t'], args.verbose)
|
||||
else:
|
||||
for s in args.sections:
|
||||
sections[s].pprint(args.summary, args.verbose)
|
||||
|
84
tools/analyze_mcu_flash_config.py
Normal file
|
@ -0,0 +1,84 @@
|
|||
# Copyright 2024 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import binutils
|
||||
import os.path
|
||||
import sh
|
||||
|
||||
|
||||
class Config(object):
|
||||
def abs_path(self, script_relative_path):
|
||||
return os.path.join(
|
||||
os.path.dirname(os.path.realpath(__file__)),
|
||||
script_relative_path)
|
||||
|
||||
def default_elf_abs_path(self):
|
||||
return self.abs_path(self.rel_elf_path())
|
||||
|
||||
def rel_elf_path(self):
|
||||
raise Exception("Implement me!")
|
||||
|
||||
def lib_paths(self):
|
||||
return []
|
||||
|
||||
def lib_symbols(self):
|
||||
# Map of lib_path -> set of symbol names defined in that library:
|
||||
lib_paths = self.lib_paths()
|
||||
|
||||
def extract_symbols(object_path):
|
||||
nm = binutils.nm_generator(object_path)
|
||||
return set([s for _, _, s, _, _, _ in nm])
|
||||
|
||||
return {path: extract_symbols(path)
|
||||
for path in lib_paths}
|
||||
|
||||
def memory_region_to_analyze(self):
|
||||
raise Exception("Implement me!")
|
||||
|
||||
def apply_tree_tweaks(self, tree):
|
||||
pass
|
||||
|
||||
|
||||
class TintinElfConfig(Config):
|
||||
def rel_elf_path(self):
|
||||
return '../build/src/fw/tintin_fw.elf'
|
||||
|
||||
def memory_region_to_analyze(self):
|
||||
FLASH_START = 0x8008000
|
||||
FLASH_END = FLASH_START + (512 * 1024)
|
||||
FLASH_REGION = (FLASH_START, FLASH_END)
|
||||
return FLASH_REGION
|
||||
|
||||
def lib_paths(self):
|
||||
return []
|
||||
|
||||
def apply_tree_tweaks(self, tree):
|
||||
# Manually add in the bootloader + gap:
|
||||
tree["Bootloader"] = 0x8000
|
||||
tree["Bootloader-FW-Gap"] = 0x8000
|
||||
|
||||
|
||||
class DialogElfConfig(Config):
|
||||
def memory_region_to_analyze(self):
|
||||
# Just spelling out both regions here in case someone wants to tweak:
|
||||
sysram_start = 0x7FC0000
|
||||
sysram_end = sysram_start + (128 * 1024)
|
||||
cacheram_start = 0x7FE0000
|
||||
cacheram_end = cacheram_start + (16 * 1024)
|
||||
return (sysram_start, cacheram_end)
|
||||
|
||||
|
||||
CONFIG_CLASSES = {
|
||||
'tintin': TintinElfConfig,
|
||||
}
|
142
tools/analyze_mcu_flash_find_unclaimed.py
Executable file
|
@ -0,0 +1,142 @@
|
|||
#!/usr/bin/python
|
||||
# Copyright 2024 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
from analyze_mcu_flash_config import *
|
||||
|
||||
import argparse
|
||||
import binutils
|
||||
import sh
|
||||
|
||||
|
||||
def contains(a, b):
|
||||
""" True if b is inside a """
|
||||
return b[0] >= a[0] and b[1] <= a[1]
|
||||
|
||||
|
||||
def claim(c, unclaimed_regions, symbol):
|
||||
""" Removes region (c_start, c_end) from the set of unclaimed_regions
|
||||
Return True if the region was successfully removed, False if it was
|
||||
already claimed.
|
||||
|
||||
"""
|
||||
if c[0] == c[1]:
|
||||
raise Exception("Invalid region: 0 size! %s" % c)
|
||||
|
||||
for u in unclaimed_regions:
|
||||
if contains(u, c):
|
||||
unclaimed_regions.remove(u)
|
||||
|
||||
# Defensive programming:
|
||||
if c[0] < u[0]:
|
||||
raise Exception("WTF! %s %s" % (u, c))
|
||||
if c[1] > u[1]:
|
||||
raise Exception("WTF! %s %s" % (u, c))
|
||||
|
||||
if u[0] != c[0]:
|
||||
# Lower edge of the claimed region does not overlap with
|
||||
# the unclaimed region. Add a piece of unclaimed padding:
|
||||
unclaimed_regions.add((u[0], c[0]))
|
||||
if u[1] != c[1]:
|
||||
# Upper edge of the claimed region does not overlap with
|
||||
# the unclaimed region. Add a piece of unclaimed padding:
|
||||
unclaimed_regions.add((c[1], u[1]))
|
||||
return True
|
||||
|
||||
print "Warning: doubly claimed %s, 0x%08x - 0x%08x?" % (symbol, c[0], c[1])
|
||||
return False
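# Example: claiming c=(4, 8) against unclaimed {(0, 10)} leaves {(0, 4), (8, 10)}.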
|
||||
|
||||
|
||||
if (__name__ == '__main__'):
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('--verbose', action='store_true')
|
||||
parser.add_argument('--dump', action='store_true',
|
||||
help='objdump unclaimed regions')
|
||||
parser.add_argument('--fast', action='store_true')
|
||||
parser.add_argument(
|
||||
'--config', default='tintin', choices=CONFIG_CLASSES.keys())
|
||||
parser.add_argument('elf_file', nargs='?')
|
||||
args = parser.parse_args()
|
||||
|
||||
config_class = CONFIG_CLASSES[args.config]
|
||||
config = config_class()
|
||||
|
||||
elf_file = args.elf_file
|
||||
if not elf_file:
|
||||
elf_file = config.default_elf_abs_path()
|
||||
|
||||
# The set of (addr_start, addr_end) tuples that we use to keep track of
|
||||
# unclaimed space in the flash:
|
||||
unclaimed_regions = set([config.memory_region_to_analyze()])
|
||||
|
||||
# Using arm-none-eabi-nm, 'claim' all .text symbols by removing the regions
|
||||
# from the unclaimed_regions set
|
||||
symbols = binutils.nm_generator(elf_file, args.fast)
|
||||
bytes_claimed = 0
|
||||
for addr, section, symbol, src_path, line, size in symbols:
|
||||
if section != 't':
|
||||
continue
|
||||
c = (addr, addr + size)
|
||||
if not contains(config.memory_region_to_analyze(), c):
|
||||
raise Exception("Not in memory region: %s 0x%08x - 0x%08x" %
|
||||
(symbol, c[0], c[1]))
|
||||
claim(c, unclaimed_regions, symbol)
|
||||
bytes_claimed += size
|
||||
|
||||
# Using the resulting map of unused space,
|
||||
# calculate the total unclaimed space:
|
||||
bytes_unclaimed = 0
|
||||
for u in unclaimed_regions:
|
||||
bytes_unclaimed += u[1] - u[0]
|
||||
|
||||
# Print out the results
|
||||
text_size = binutils.size(elf_file)[0]
|
||||
region = config.memory_region_to_analyze()
|
||||
print "------------------------------------------------------------"
|
||||
print ".text: %u" % text_size
|
||||
print "unclaimed memory: %u" % bytes_unclaimed
|
||||
print "claimed memory: %u" % bytes_claimed
|
||||
print "unknown .text regions %u" % (text_size - bytes_claimed)
|
||||
print ""
|
||||
print "These should add up:"
|
||||
print "bytes_unclaimed + bytes_claimed = %u" % (bytes_unclaimed +
|
||||
bytes_claimed)
|
||||
print "REGION_END - REGION_START = %u" % (region[1] - region[0])
|
||||
print ""
|
||||
|
||||
num = 30
|
||||
print "------------------------------------------------------------"
|
||||
print "Top %u unclaimed memory regions:" % num
|
||||
|
||||
def comparator(a, b):
|
||||
return cmp(a[1] - a[0], b[1] - b[0])
|
||||
unclaimed_sorted_by_size = sorted(unclaimed_regions,
|
||||
cmp=comparator, reverse=True)
|
||||
for x in xrange(0, min(num, len(unclaimed_sorted_by_size))):
|
||||
region = unclaimed_sorted_by_size[x]
|
||||
size = region[1] - region[0]
|
||||
if args.dump:
|
||||
print "-----------------------------------------------------------"
|
||||
print "%u bytes @ 0x%08x" % (size, region[0])
|
||||
print ""
|
||||
print sh.arm_none_eabi_objdump('-S',
|
||||
'--start-address=0x%x' % region[0],
|
||||
'--stop-address=0x%x' % region[1],
|
||||
elf_file)
|
||||
else:
|
||||
print "%u bytes @ 0x%08x" % (size, region[0])
|
||||
|
||||
print "------------------------------------------------------------"
|
||||
print "Unclaimed regions are regions that did map to symbols in the .elf."
|
292
tools/analyze_mcu_flash_usage_treemap.html
Normal file
|
@ -0,0 +1,292 @@
|
|||
<!DOCTYPE html>
|
||||
<!--
|
||||
Copyright 2024 Google LLC
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
|
||||
<html class="ocks-org do-not-copy">
|
||||
<meta charset="utf-8">
|
||||
<title>Pebble codespace</title>
|
||||
<style>
|
||||
|
||||
* {
|
||||
margin: 0px;
|
||||
padding: 0px;
|
||||
border: none 0px;
|
||||
font-family: sans-serif;
|
||||
}
|
||||
|
||||
#chart {
|
||||
position: absolute;
|
||||
top: 0px;
|
||||
left: 0px;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
background: #ddd;
|
||||
z-index: -1;
|
||||
}
|
||||
|
||||
header {
|
||||
position: absolute;
|
||||
top: 0px;
|
||||
right: 0px;
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
footer {
|
||||
position: absolute;
|
||||
bottom: 0px;
|
||||
right: 0px;
|
||||
}
|
||||
|
||||
text {
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.grandparent text {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
rect {
|
||||
fill: none;
|
||||
stroke: #fff;
|
||||
}
|
||||
|
||||
rect.parent,
|
||||
.grandparent rect {
|
||||
stroke-width: 2px;
|
||||
}
|
||||
|
||||
.grandparent rect {
|
||||
fill: orange;
|
||||
}
|
||||
|
||||
.grandparent:hover rect {
|
||||
fill: #ee9700;
|
||||
}
|
||||
|
||||
.children rect.parent,
|
||||
.grandparent rect {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.children rect.parent {
|
||||
fill: #bbb;
|
||||
fill-opacity: .5;
|
||||
}
|
||||
|
||||
.children:hover rect.child {
|
||||
fill: #bbb;
|
||||
}
|
||||
|
||||
</style>
|
||||
|
||||
<header>
|
||||
<h1>Pebble codespace: where did it all go?</h1>
|
||||
Click a gray area to zoom in, click the orange header to zoom out.
|
||||
</header>
|
||||
|
||||
<p id="chart">
|
||||
|
||||
<footer>
|
||||
<ul>
|
||||
<li>Unclaimed: space for which no symbols were found in the .elf</li>
|
||||
<li>Unknown: symbols could not be mapped to source files from the .elf</li>
|
||||
</ul>
|
||||
<p><a href="http://bost.ocks.org/mike/treemap/">TreeMap visualization code borrowed from Mike Bostock</a></p>
|
||||
</footer>
|
||||
|
||||
<script src="http://d3js.org/d3.v3.min.js"></script>
|
||||
<script>
|
||||
|
||||
var margin = {top: 20, right: 0, bottom: 0, left: 0},
|
||||
width = window.innerWidth,
|
||||
height = window.innerHeight - margin.top - margin.bottom,
|
||||
formatNumber = d3.format(",d"),
|
||||
transitioning;
|
||||
|
||||
var x = d3.scale.linear()
|
||||
.domain([0, width])
|
||||
.range([0, width]);
|
||||
|
||||
var y = d3.scale.linear()
|
||||
.domain([0, height])
|
||||
.range([0, height]);
|
||||
|
||||
var treemap = d3.layout.treemap()
|
||||
.children(function(d, depth) { return depth ? null : d._children; })
|
||||
.sort(function(a, b) { return a.value - b.value; })
|
||||
.ratio(height / width * 0.5 * (1 + Math.sqrt(5)))
|
||||
.round(false);
|
||||
|
||||
var svg = d3.select("#chart").append("svg")
|
||||
.attr("width", width + margin.left + margin.right)
|
||||
.attr("height", height + margin.bottom + margin.top)
|
||||
.style("margin-left", -margin.left + "px")
|
||||
.style("margin.right", -margin.right + "px")
|
||||
.append("g")
|
||||
.attr("transform", "translate(" + margin.left + "," + margin.top + ")")
|
||||
.style("shape-rendering", "crispEdges");
|
||||
|
||||
var grandparent = svg.append("g")
|
||||
.attr("class", "grandparent");
|
||||
|
||||
grandparent.append("rect")
|
||||
.attr("y", -margin.top)
|
||||
.attr("width", width)
|
||||
.attr("height", margin.top);
|
||||
|
||||
grandparent.append("text")
|
||||
.attr("x", 6)
|
||||
.attr("y", 6 - margin.top)
|
||||
.attr("dy", ".75em");
|
||||
|
||||
function renderJson(root) {
|
||||
initialize(root);
|
||||
accumulate(root);
|
||||
layout(root);
|
||||
display(root);
|
||||
|
||||
function initialize(root) {
|
||||
root.x = root.y = 0;
|
||||
root.dx = width;
|
||||
root.dy = height;
|
||||
root.depth = 0;
|
||||
}
|
||||
|
||||
// Aggregate the values for internal nodes. This is normally done by the
|
||||
// treemap layout, but not here because of our custom implementation.
|
||||
// We also take a snapshot of the original children (_children) to avoid
|
||||
// the children being overwritten when the layout is computed.
|
||||
function accumulate(d) {
|
||||
return (d._children = d.children)
|
||||
? d.value = d.children.reduce(function(p, v) { return p + accumulate(v); }, 0)
|
||||
: d.value;
|
||||
}
|
||||
|
||||
// Compute the treemap layout recursively such that each group of siblings
|
||||
// uses the same size (1×1) rather than the dimensions of the parent cell.
|
||||
// This optimizes the layout for the current zoom state. Note that a wrapper
|
||||
// object is created for the parent node for each group of siblings so that
|
||||
// the parent’s dimensions are not discarded as we recurse. Since each group
|
||||
// of sibling was laid out in 1×1, we must rescale to fit using absolute
|
||||
// coordinates. This lets us use a viewport to zoom.
|
||||
function layout(d) {
|
||||
if (d._children) {
|
||||
treemap.nodes({_children: d._children});
|
||||
d._children.forEach(function(c) {
|
||||
c.x = d.x + c.x * d.dx;
|
||||
c.y = d.y + c.y * d.dy;
|
||||
c.dx *= d.dx;
|
||||
c.dy *= d.dy;
|
||||
c.parent = d;
|
||||
layout(c);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function display(d) {
|
||||
grandparent
|
||||
.datum(d.parent)
|
||||
.on("click", transition)
|
||||
.select("text")
|
||||
.text(name(d));
|
||||
|
||||
var g1 = svg.insert("g", ".grandparent")
|
||||
.datum(d)
|
||||
.attr("class", "depth");
|
||||
|
||||
var g = g1.selectAll("g")
|
||||
.data(d._children)
|
||||
.enter().append("g");
|
||||
|
||||
g.filter(function(d) { return d._children; })
|
||||
.classed("children", true)
|
||||
.on("click", transition);
|
||||
|
||||
g.selectAll(".child")
|
||||
.data(function(d) { return d._children || [d]; })
|
||||
.enter().append("rect")
|
||||
.attr("class", "child")
|
||||
.call(rect);
|
||||
|
||||
g.append("rect")
|
||||
.attr("class", "parent")
|
||||
.call(rect)
|
||||
.append("title")
|
||||
.text(function(d) { return formatNumber(d.value); });
|
||||
|
||||
g.append("text")
|
||||
.attr("dy", ".75em")
|
||||
.text(function(d) { return d.name + ' ' + d.value; })
|
||||
.call(text);
|
||||
|
||||
function transition(d) {
|
||||
if (transitioning || !d) return;
|
||||
transitioning = true;
|
||||
|
||||
var g2 = display(d),
|
||||
t1 = g1.transition().duration(750),
|
||||
t2 = g2.transition().duration(750);
|
||||
|
||||
// Update the domain only after entering new elements.
|
||||
x.domain([d.x, d.x + d.dx]);
|
||||
y.domain([d.y, d.y + d.dy]);
|
||||
|
||||
// Enable anti-aliasing during the transition.
|
||||
svg.style("shape-rendering", null);
|
||||
|
||||
// Draw child nodes on top of parent nodes.
|
||||
svg.selectAll(".depth").sort(function(a, b) { return a.depth - b.depth; });
|
||||
|
||||
// Fade-in entering text.
|
||||
g2.selectAll("text").style("fill-opacity", 0);
|
||||
|
||||
// Transition to the new view.
|
||||
t1.selectAll("text").call(text).style("fill-opacity", 0);
|
||||
t2.selectAll("text").call(text).style("fill-opacity", 1);
|
||||
t1.selectAll("rect").call(rect);
|
||||
t2.selectAll("rect").call(rect);
|
||||
|
||||
// Remove the old node when the transition is finished.
|
||||
t1.remove().each("end", function() {
|
||||
svg.style("shape-rendering", "crispEdges");
|
||||
transitioning = false;
|
||||
});
|
||||
}
|
||||
|
||||
return g;
|
||||
}
|
||||
|
||||
function text(text) {
|
||||
text.attr("x", function(d) { return x(d.x) + 6; })
|
||||
.attr("y", function(d) { return y(d.y) + 6; });
|
||||
}
|
||||
|
||||
function rect(rect) {
|
||||
rect.attr("x", function(d) { return x(d.x); })
|
||||
.attr("y", function(d) { return y(d.y); })
|
||||
.attr("width", function(d) { return x(d.x + d.dx) - x(d.x); })
|
||||
.attr("height", function(d) { return y(d.y + d.dy) - y(d.y); });
|
||||
}
|
||||
|
||||
function name(d) {
|
||||
return d.parent
|
||||
? name(d.parent) + "/" + d.name
|
||||
: d.name;
|
||||
}
|
||||
}
|
||||
|
||||
</script>
|
||||
<script src="analyze_mcu_flash_usage_treemap.jsonp"></script>
|
163
tools/analyze_mcu_flash_usage_treemap.py
Executable file
|
@ -0,0 +1,163 @@
|
|||
#!/usr/bin/python
|
||||
# Copyright 2024 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
from analyze_mcu_flash_find_unclaimed import claim
|
||||
from analyze_mcu_flash_config import *
|
||||
import argparse
|
||||
import binutils
|
||||
import json
|
||||
import os.path
|
||||
import re
|
||||
import sh
|
||||
|
||||
|
||||
ROOT_PATH_SPLIT_RE = re.compile(r'\.?\.?/?([^/]+)/(.*)')
|
||||
|
||||
|
||||
def split_root_path(path):
|
||||
""" Takes a file path and returns a tuple: (root, rest_of_path)
|
||||
For example: "src/tintin/main.c" -> ("src", "tintin/main.c")
|
||||
"""
|
||||
match = ROOT_PATH_SPLIT_RE.match(path)
|
||||
if match:
|
||||
groups = match.groups()
|
||||
return (groups[0], groups[1])
|
||||
else:
|
||||
return (path, None)
|
||||
|
||||
|
||||
def tree_add_value(tree, path, value):
|
||||
""" Creates a subtree based on path in a given tree. Returns tree. """
|
||||
root, rest = split_root_path(path)
|
||||
if rest:
|
||||
# We haven't reached the leaf yet
|
||||
if root in tree:
|
||||
subtree = tree_add_value(tree[root], rest, value)
|
||||
tree[root].update(subtree)
|
||||
else:
|
||||
subtree = tree_add_value({}, rest, value)
|
||||
tree[root] = subtree
|
||||
else:
|
||||
# Leaf is reached!
|
||||
tree[root] = value
|
||||
return tree
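# Example: tree_add_value({}, 'src/fw/main.c/my_func', 120) returns
# {'src': {'fw': {'main.c': {'my_func': 120}}}} ('my_func' is illustrative).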
|
||||
|
||||
|
||||
def generate_tree(f, additional_symbols, config):
|
||||
""" Generates a tree based on the output of arm-none-eabi-nm. The tree its
|
||||
branches are the folders and files in the code base. The leaves of the
|
||||
tree are symbols and their sizes. Only symbols from .text are included.
|
||||
The tree is represented with dict() objects, where the folder, file or
|
||||
symbol name are keys. In case of a folder or file, another dict() will
|
||||
be the value. In case of a symbol, the value is an int() of its size.
|
||||
"""
|
||||
symbols = binutils.nm_generator(f)
|
||||
unclaimed_regions = set([config.memory_region_to_analyze()])
|
||||
tree = {}
|
||||
total_size = 0
|
||||
for addr, section, symbol, src_path, line, size in symbols:
|
||||
if section != 't':
|
||||
# Not .text
|
||||
continue
|
||||
region = (addr, addr + size)
|
||||
if not claim(region, unclaimed_regions, symbol):
|
||||
# Region is already claimed by another symbol
|
||||
continue
|
||||
if not src_path or src_path == '?':
|
||||
src_path = '?'
|
||||
# Try to find the symbol in one of the additional symbol sets:
|
||||
for k in additional_symbols:
|
||||
if symbol in additional_symbols[k]:
|
||||
src_path = k
|
||||
break
|
||||
if symbol.startswith('sys_') or symbol.startswith('syscall'):
|
||||
src_path = 'build/src/fw/syscall.auto.s'
|
||||
break
|
||||
path = os.path.join(src_path, symbol)
|
||||
tree = tree_add_value(tree, path, size)
|
||||
total_size += size
|
||||
return (tree, total_size)
|
||||
|
||||
|
||||
def convert_tree_to_d3(parent_name, tree):
|
||||
""" Converts a tree as generated by generate_tree() to a dict() that
|
||||
can be converted to JSON to use with the d3.js graphing library.
|
||||
"""
|
||||
def convert_to_d3_node(parent_name, val):
|
||||
node = {'name': parent_name}
|
||||
val_type = type(val)
|
||||
if val_type is dict:
|
||||
node['children'] = [convert_to_d3_node(k, v)
|
||||
for k, v in val.iteritems()]
|
||||
elif val_type is int:
|
||||
node['value'] = val
|
||||
else:
|
||||
raise Exception("Unexpected node type: %s, "
|
||||
"parent_name=%s, val=%s" %
|
||||
(str(val_type), parent_name, val))
|
||||
return node
|
||||
return convert_to_d3_node(parent_name, tree)
|
||||
|
||||
|
||||
if (__name__ == '__main__'):
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('--verbose', action='store_true')
|
||||
parser.add_argument(
|
||||
'--config', default='tintin', choices=CONFIG_CLASSES.keys())
|
||||
parser.add_argument('elf_file', nargs='?')
|
||||
args = parser.parse_args()
|
||||
|
||||
config_class = CONFIG_CLASSES[args.config]
|
||||
config = config_class()
|
||||
|
||||
elf_file = args.elf_file
|
||||
if not elf_file:
|
||||
elf_file = config.default_elf_abs_path()
|
||||
|
||||
# Create the tree:
|
||||
lib_symbols = config.lib_symbols()
|
||||
tree, total_size = generate_tree(elf_file, lib_symbols, config)
|
||||
|
||||
# Unclaimed is space for which no symbols were found.
|
||||
# Run analyze_mcu_flash_find_unclaimed.py to get a dump of these regions.
|
||||
text_size = binutils.size(elf_file)[0]
|
||||
unclaimed_size = text_size - total_size
|
||||
if unclaimed_size:
|
||||
tree["Unclaimed"] = unclaimed_size
|
||||
|
||||
config.apply_tree_tweaks(tree)
|
||||
|
||||
# Convert to a structure that works with the d3.js graphing lib:
|
||||
d3_tree = convert_tree_to_d3('tintin', tree)
|
||||
|
||||
# Dump to .json file:
|
||||
json_filename = 'analyze_mcu_flash_usage_treemap.jsonp'
|
||||
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
json_path = os.path.join(script_dir, json_filename)
|
||||
|
||||
file_out = open(json_path, 'wb')
|
||||
file_out.write("renderJson(")
|
||||
json.dump(d3_tree, file_out)
|
||||
file_out.write(");")
|
||||
file_out.close()
|
||||
|
||||
# Print out some stats:
|
||||
print "Total .text bytes: %u" % text_size
|
||||
print "Total bytes mapped: %u" % total_size
|
||||
print "-------------------------------------"
|
||||
print "Unaccounted bytes: %u" % (text_size - total_size)
|
||||
print ""
|
||||
print "Now go open %s.html to view treemap" % os.path.splitext(__file__)[0]
|
120
tools/analyze_pdebug_dat.py
Normal file
|
@ -0,0 +1,120 @@
|
|||
# Copyright 2024 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# This file can be used to process output from waf's extra tool parallel_debug.py that enables
|
||||
# the profiling of tasks in a waf build.
|
||||
#
|
||||
# To use this!
|
||||
# * Grab the parallel_debug.py file from the waf repo and throw it in your waftools directory
|
||||
# https://raw.githubusercontent.com/waf-project/waf/master/waflib/extras/parallel_debug.py
|
||||
# * Add "conf.load('parallel_debug', tooldir='waftools')" to the top of def configure in the
|
||||
# root wscript
|
||||
# * Run your build! This should produce a pdebug.data and pdebug.svg in your root
|
||||
# * Run this script with the current directory being the root of your repo
|
||||
#
|
||||
# Output will look something like the following:
|
||||
# c 5326 44.79% 251.37s 0.05s
|
||||
# .pbi 2496 40.42% 226.83s 0.09s
|
||||
# run_test 434 08.29% 46.55s 0.11s
|
||||
# clar_main.c,clar.h 434 03.68% 20.63s 0.05s
|
||||
# cprogram 436 02.56% 14.39s 0.03s
|
||||
# .png 40 00.14% 0.79s 0.02s
|
||||
# .pdc 16 00.03% 0.18s 0.01s
|
||||
# python -m unittest discover -s /Users/brad/pebble/tintin/t 2 00.03% 0.16s 0.08s
|
||||
# .apng 8 00.02% 0.13s 0.02s
|
||||
# .c 68 00.02% 0.09s 0.00s
|
||||
# pbi2png.py 2 00.01% 0.07s 0.03s
|
||||
# cstlib 2 00.01% 0.06s 0.03s
|
||||
#
|
||||
# Columns are task name, count, total percentage, total time, average time per task
|
||||
|
||||
tasks_by_thread = {}
|
||||
|
||||
|
||||
class Task(object):
|
||||
pass
|
||||
|
||||
# process all lines
|
||||
with open('pdebug.dat') as f:
|
||||
import csv
|
||||
reader = csv.reader(f, delimiter=' ')
|
||||
for row in reader:
|
||||
t = Task()
|
||||
t.thread_id = int(row[0])
|
||||
t.task_id = int(row[1])
|
||||
t.start_time = float(row[2])
|
||||
t.task_name = row[3]
|
||||
|
||||
if t.task_name.startswith("'"):
|
||||
t.task_name = t.task_name[1:]
|
||||
if t.task_name.endswith("'"):
|
||||
t.task_name = t.task_name[:-1]
|
||||
|
||||
thread_tasks = tasks_by_thread.setdefault(t.thread_id, [])
|
||||
thread_tasks.append(t)
|
||||
|
||||
# assign durations
|
||||
for thread_tasks in tasks_by_thread.values():
|
||||
for i in xrange(len(thread_tasks) - 1):
|
||||
thread_tasks[i].duration = thread_tasks[i+1].start_time - thread_tasks[i].start_time
|
||||
|
||||
# Can't guess the duration for the final task because the values only have start times :(
|
||||
thread_tasks[-1].duration = 0
|
||||
|
||||
# Flatten the dict into a big list
|
||||
all_tasks = [item for sublist in tasks_by_thread.values() for item in sublist]
|
||||
|
||||
tasks_by_task_type = {}
|
||||
for t in all_tasks:
|
||||
task_type_name = t.task_name
|
||||
|
||||
if task_type_name.endswith('.pbi'):
|
||||
task_type_name = '.pbi'
|
||||
elif task_type_name.endswith('.png'):
|
||||
task_type_name = '.png'
|
||||
elif task_type_name.endswith('.apng'):
|
||||
task_type_name = '.apng'
|
||||
elif task_type_name.endswith('.pdc'):
|
||||
task_type_name = '.pdc'
|
||||
elif task_type_name.endswith('.c'):
|
||||
task_type_name = '.c'
|
||||
|
||||
task_type_tasks = tasks_by_task_type.setdefault(task_type_name, [])
|
||||
task_type_tasks.append(t)
|
||||
|
||||
|
||||
class TaskType(object):
|
||||
pass
|
||||
|
||||
task_types = []
|
||||
total_duration = 0.0
|
||||
for task_type_name, tasks in tasks_by_task_type.items():
|
||||
tt = TaskType()
|
||||
|
||||
tt.name = task_type_name
|
||||
tt.total_duration = reduce(lambda accumulated, x: accumulated + x.duration, tasks, 0.0)
|
||||
tt.average_duration = tt.total_duration / len(tasks)
|
||||
tt.count = len(tasks)
|
||||
|
||||
task_types.append(tt)
|
||||
|
||||
total_duration += tt.total_duration
|
||||
|
||||
task_types.sort(key=lambda x: -x.total_duration)
|
||||
|
||||
for tt in task_types:
|
||||
percentage_of_total = (tt.total_duration / total_duration) * 100
|
||||
|
||||
print "%-60s %5u %05.2f%% %7.2fs %6.2fs" % \
|
||||
(tt.name[:58], tt.count, percentage_of_total, tt.total_duration, tt.average_duration)
|
136
tools/analyze_static_memory_layout.py
Normal file
|
@ -0,0 +1,136 @@
|
|||
#!/usr/bin/env python
|
||||
# Copyright 2024 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import argparse
|
||||
import re
|
||||
import sh
|
||||
|
||||
class SectionInfo(object):
|
||||
def __init__(self, name, begin, end):
|
||||
self.name = name
|
||||
self.begin = begin
|
||||
self.end = end
|
||||
|
||||
def read_section_info(elf_file):
|
||||
section_headers_output = sh.arm_none_eabi_readelf('-S', elf_file)
|
||||
|
||||
line_pattern = re.compile(r"""\[\s*\d+\]\s+ # Number
|
||||
(\S+)\s+ # Name
|
||||
\S+\s+ # Type
|
||||
([0-9a-f]+)\s+ # Virtual Address
|
||||
[0-9a-f]+\s+ # Load Address
|
||||
([0-9a-f]+)\s+ # Size
|
||||
""", flags=re.VERBOSE)
|
||||
|
||||
sections = []
|
||||
|
||||
for line in section_headers_output:
|
||||
match = line_pattern.search(line)
|
||||
|
||||
if match is None:
|
||||
continue
|
||||
|
||||
name = match.group(1)
|
||||
addr = int(match.group(2), 16)
|
||||
size = int(match.group(3), 16)
|
||||
|
||||
if (addr < 0x20000000 or addr > 0x20030000) and (addr < 0x10000000 or addr > 0x10010000):
|
||||
# We only care about stuff that goes into ram or CCM
|
||||
continue
|
||||
|
||||
if (name == 'DISCARD'):
|
||||
continue
|
||||
|
||||
sections.append(SectionInfo(name, addr, addr + size))
|
||||
|
||||
return sections
|
||||
|
||||
def find_symbol(word):
|
||||
return re.compile(r"""\b({0})$""".format(word)).search
|
||||
|
||||
def read_layout_symbols(elf_file):
|
||||
symbols_output = sh.arm_none_eabi_objdump('-t', elf_file)
|
||||
|
||||
desired_symbols = [ 'system_stm32f4xx.c',
|
||||
'_heap_start',
|
||||
'_heap_end' ]
|
||||
|
||||
symbols = {}
|
||||
|
||||
line_pattern = re.compile(r"""^(\S+)""")
|
||||
|
||||
for line in symbols_output:
|
||||
for s in desired_symbols:
|
||||
if find_symbol(s)(line):
|
||||
match = line_pattern.search(line)
|
||||
symbols[s] = int(match.group(1), 16)
|
||||
|
||||
return symbols
|
||||
|
||||
def analyze_layout(elf_file):
|
||||
sections = read_section_info(elf_file)
|
||||
symbols = read_layout_symbols(elf_file)
|
||||
|
||||
ram_start_address = 0x20000000
|
||||
ram_end_address = 0x20020000
|
||||
|
||||
if 'system_stm32f4xx.c' in symbols:
|
||||
# We have a snowy! Adjust the RAM size to be larger
|
||||
ram_end_address = 0x20030000
|
||||
|
||||
# Add a dummy section that spans where the kernel heap is
|
||||
sections.append(SectionInfo('<KERNEL HEAP>', symbols['_heap_start'], symbols['_heap_end']))
|
||||
|
||||
sections = sorted(sections, key=lambda x: x.begin)
|
||||
|
||||
last_end = 0
|
||||
padding_total = 0
|
||||
|
||||
for s in sections:
|
||||
if last_end != 0 and last_end != s.begin:
|
||||
# There's a gap between sections! Use the last section's name to identify what it is.
|
||||
|
||||
padding_end = s.begin
|
||||
|
||||
# Don't insert a padding after CCM and before RAM
|
||||
if padding_end != ram_start_address:
|
||||
if last_name == '.worker_bss' or last_name == '.workerlib_paddin':
|
||||
name = 'WORKER CODE + HEAP'
|
||||
else:
|
||||
name = 'PADDING'
|
||||
padding_total += padding_end - last_end
|
||||
|
||||
print "0x%x - 0x%x %6u bytes <%s>" % \
|
||||
(last_end, padding_end, padding_end - last_end, name)
|
||||
|
||||
print "0x%x - 0x%x %6u bytes %s" % (s.begin, s.end, s.end - s.begin, s.name)
|
||||
|
||||
last_end = s.end
|
||||
last_name = s.name
|
||||
|
||||
# The app code + heap region doesn't have a section for it, it just takes up everything at the
|
||||
# end of the address space.
|
||||
print "0x%x - 0x%x %6u bytes <APP CODE + HEAP>" % (last_end, ram_end_address, ram_end_address - last_end)
|
||||
|
||||
print 'Total padding: %u bytes' % padding_total
|
||||
|
||||
if (__name__ == '__main__'):
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('elf_file')
|
||||
args = parser.parse_args()
|
||||
|
||||
sections = analyze_layout(args.elf_file)
|
||||
|
31
tools/analyze_static_memory_usage.py
Executable file
|
@ -0,0 +1,31 @@
|
|||
#!/usr/bin/env python
|
||||
# Copyright 2024 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import argparse
|
||||
from binutils import analyze_elf
|
||||
|
||||
if (__name__ == '__main__'):
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('--verbose', action='store_true')
|
||||
parser.add_argument('--summary', action='store_true')
|
||||
parser.add_argument('--fast', action='store_true')
|
||||
parser.add_argument('--sections', default='bdt')
|
||||
parser.add_argument('elf_file')
|
||||
args = parser.parse_args()
|
||||
|
||||
sections = analyze_elf(args.elf_file, args.sections, args.fast)
|
||||
|
||||
for s in sections.itervalues():
|
||||
s.pprint(args.summary, args.verbose)
|
51
tools/animation_timing_tables.py
Normal file
|
@ -0,0 +1,51 @@
|
|||
#!/usr/bin/env python
|
||||
# Copyright 2024 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
# These constants are borrowed from animation_timing.h:
|
||||
ANIMATION_NORMALIZED_MAX = 65535
|
||||
ANIMATION_NORMALIZED_MIN = 0
|
||||
|
||||
# "Standard" animation curves, borrowed from the interwebs:
|
||||
d = ANIMATION_NORMALIZED_MAX
|
||||
b = ANIMATION_NORMALIZED_MIN
|
||||
c = ANIMATION_NORMALIZED_MAX
|
||||
|
||||
def easeInOut(t):
|
||||
t = t / (d / 2)
|
||||
if (t < 1.0):
|
||||
return c/2*t*t + b
|
||||
t -= 1
|
||||
return -c/2 * (t*(t-2) - 1) + b
|
||||
|
||||
def easeOut(t):
|
||||
t /= d
|
||||
return -c * t * (t - 2) + b
|
||||
|
||||
def easeIn(t):
|
||||
t /= d
|
||||
return c*t*t + b
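# These are the standard quadratic easing curves; e.g. easeIn(32768.0) is
# roughly 16384, a quarter of the output range at the halfway point.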
|
||||
|
||||
def print_table(name, func):
|
||||
nums_per_row = 4
|
||||
table = [func(float(t)) for t in xrange(0, 65537, 2048)]
|
||||
print "static const uint16_t %s_table[33] = {" % name
|
||||
for i in xrange(0, len(table), nums_per_row):
|
||||
print ' ' + ', '.join(str(int(n)) for n in table[i:i+nums_per_row]) + ','
|
||||
print '};\n'
|
||||
|
||||
print_table('ease_in', easeIn)
|
||||
print_table('ease_out', easeOut)
|
||||
print_table('ease_in_out', easeInOut)
|
146
tools/app_header.py
Normal file
@@ -0,0 +1,146 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import struct
import uuid


class PebbleAppHeader(object):
    MAGIC = 'PBLAPP\x00\x00'

    # 10 bytes
    HEADER_STRUCT_DEFINITION = [
        '8s',  # header
        '2B',  # struct version
    ]
    HEADER_STRUCT = struct.Struct(''.join(HEADER_STRUCT_DEFINITION))

    # 116 bytes
    V1_STRUCT_VERSION = (0x08, 0x01)
    V1_STRUCT_DEFINITION = [
        # format, name, deserialization transform, serialization transform
        ('B', 'sdk_version_major', None, None),
        ('B', 'sdk_version_minor', None, None),
        ('B', 'app_version_major', None, None),
        ('B', 'app_version_minor', None, None),
        ('H', 'app_size', None, None),
        ('I', 'offset', None, None),
        ('I', 'crc', None, None),
        ('32s', 'app_name', lambda bytes: bytes.rstrip('\0'), None),
        ('32s', 'company_name', lambda bytes: bytes.rstrip('\0'), None),
        ('I', 'icon_resource_id', None, None),
        ('I', 'symbol_table_addr', None, None),
        ('I', 'flags', None, None),
        ('I', 'relocation_list_index', None, None),
        ('I', 'num_relocation_entries', None, None),
        ('16s', 'uuid', lambda s: uuid.UUID(bytes=s), lambda u: u.bytes),
    ]

    # 120 bytes
    V2_STRUCT_VERSION = (0x10, 0x00)
    V2_STRUCT_DEFINITION = list(V1_STRUCT_DEFINITION)
    del V2_STRUCT_DEFINITION[12]  # relocation list was dropped in v2.x
    V2_STRUCT_DEFINITION += [
        ('I', 'resource_crc', None, None),
        ('I', 'resource_timestamp', None, None),
        ('H', 'virtual_size', None, None),
    ]
    V2_HEADER_LENGTH = 10 + 120

    DEFINITION_MAP = {
        V1_STRUCT_VERSION: V1_STRUCT_DEFINITION,
        V2_STRUCT_VERSION: V2_STRUCT_DEFINITION,
    }

    @classmethod
    def get_def_and_struct(cls, struct_version):
        definition = cls.DEFINITION_MAP.get(struct_version)
        if not definition:
            raise Exception("Unsupported struct version %s" % str(struct_version))
        fmt = '<' + reduce(lambda s, t: s + t[0], definition, '')
        s = struct.Struct(fmt)
        return definition, s

    @classmethod
    def deserialize(cls, app_bin):
        header_size = cls.HEADER_STRUCT.size
        header = app_bin[0:header_size]
        values = cls.HEADER_STRUCT.unpack(header)
        struct_version = (values[1], values[2])
        info = {
            'sentinel': values[0],
            'struct_version_major': struct_version[0],
            'struct_version_minor': struct_version[1],
        }

        if info['sentinel'] != cls.MAGIC:
            raise Exception('This is not a pebble watchapp')

        definition, s = cls.get_def_and_struct(struct_version)
        values = s.unpack(app_bin[header_size:header_size + s.size])
        for value, elem in zip(values, definition):
            field_name = elem[1]
            transform = elem[2]
            info[field_name] = value if not transform else transform(value)
        return info

    def serialize(self):
        struct_version = (self._info['struct_version_major'],
                          self._info['struct_version_minor'])
        header = PebbleAppHeader.HEADER_STRUCT.pack(PebbleAppHeader.MAGIC,
                                                    *struct_version)

        definition, s = self.__class__.get_def_and_struct(struct_version)

        def map_args(elem):
            value = self._info[elem[1]]
            transform = elem[3]
            return value if not transform else transform(value)
        args = map(map_args, definition)

        return header + s.pack(*args)

    def __init__(self, app_bin_bytes):
        self._info = PebbleAppHeader.deserialize(app_bin_bytes)

    def __getattr__(self, name):
        value = self._info.get(name)
        if value is None:
            raise Exception("Unknown field %s" % name)
        return value

    def __setattr__(self, name, value):
        if name == '_info':
            super(PebbleAppHeader, self).__setattr__(name, value)
        else:
            self._info[name] = value

    def __repr__(self):
        return self._info.__repr__()

    def __str__(self):
        return self.__repr__()


if __name__ == '__main__':
    import argparse
    import pprint
    parser = argparse.ArgumentParser()
    parser.add_argument('bin_file')
    args = parser.parse_args()

    with open(args.bin_file, 'rb') as f:
        app_info = PebbleAppHeader.deserialize(f.read())

    pprint.pprint(app_info)

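A minimal round-trip sketch of the class above (hypothetical file name, not part of the original tool): deserialize a header, adjust a field through the attribute interface, and splice the re-serialized header back onto the binary.

    # Assumes 'app.bin' is a Pebble app binary starting with the 'PBLAPP' sentinel.
    with open('app.bin', 'rb') as f:
        app_bin = f.read()
    header = PebbleAppHeader(app_bin)
    header.app_version_minor += 1  # routed through __setattr__ into _info
    new_prefix = header.serialize()
    patched = new_prefix + app_bin[len(new_prefix):]
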
171
tools/applib_malloc.py
Normal file
@@ -0,0 +1,171 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import json
import os
import sh
import string


class ApplibType(object):
    def __init__(self, type_dict):
        self.name = type_dict['name']

        self.check_size = 1  # C preproc bool: 1 = true, 0 = false
        self.min_sdk = 0
        self.size_2x = type_dict.get('size_2x', 0)
        self.size_3x_direct_padding = type_dict.get('size_3x_padding', 0)
        self.size_3x = type_dict.get('size_3x', 0)

        self.dependencies = type_dict.get('dependencies', [])

        self.total_3x_padding = None

    def get_total_3x_padding(self, all_types):
        """ Return the amount of padding to use for the 3.x version of the struct, including
        both the direct padding we add for this struct in particular and the padding needed
        for all dependent structs.
        """

        if self.total_3x_padding is not None:
            # We have it cached, just return the previously calculated value
            return self.total_3x_padding

        self.total_3x_padding = self.size_3x_direct_padding

        for d in self.dependencies:
            parent = filter(lambda t: d == t.name, all_types)[0]
            self.total_3x_padding += parent.get_total_3x_padding(all_types)

        return self.total_3x_padding

    def __repr__(self):
        return '<%s %s>' % (self.__class__.__name__, self.name)


def get_types(data):
    return [ApplibType(t) for t in data['types'] if 'name' in t]


def writeline(f, str=''):
    f.write(str + '\n')


def write_template(f, filepath, replace):
    with open(filepath, 'r') as template_file:
        template = string.Template(template_file.read())
        f.write(template.safe_substitute(**replace) + '\n')


def generate_header(data, output_filename):
    all_types = get_types(data)

    with open(output_filename, 'w') as f:
        write_template(f, 'tools/applib_malloc.template.h', {
            'filename': output_filename,
        })

        for t in all_types:
            write_template(f, 'tools/applib_malloc_type.template.h', t.__dict__)


def generate_implementation(data, output_filename, min_sdk, disable_size_checks=False):
    all_types = get_types(data)
    with open(output_filename, 'w') as f:
        includes = ['#include "%s"' % h for h in data['headers']]
        applib_enum_types = ['ApplibType_%s' % t.name for t in all_types]
        applib_malloc_types = ['{ sizeof(%s), %u, %u }' % (t.name, t.size_2x, t.size_3x)
                               for t in all_types]

        write_template(f, 'tools/applib_malloc.template.c', {
            'filename': os.path.basename(output_filename),
            'includes': '\n'.join(includes),
            'applib_enum_types': ',\n '.join(applib_enum_types),
            'applib_malloc_types': ',\n '.join(applib_malloc_types),
        })

        for t in all_types:
            t.min_sdk = min_sdk
            t.check_size = 0 if disable_size_checks else 1
            t.get_total_3x_padding(all_types)  # Populate the value
            write_template(f, 'tools/applib_malloc_type.template.c', t.__dict__)


def generate_files(json_filename, header_filename, impl_filename, min_sdk,
                   disable_size_checks=False):
    with open(json_filename) as f:
        data = json.load(f)

    generate_header(data, header_filename)
    generate_implementation(data, impl_filename, min_sdk, disable_size_checks)


def _get_sizeof_type(elf_filename, typename):
    def _run_gdb(cmd):
        running_cmd = sh.arm_none_eabi_gdb(elf_filename, batch=True, nx=True, ex=cmd)
        result = str(running_cmd)

        # Strip escape sequences if present
        if result[0] == '\x1b':
            result = result[8:]

        return result.strip()

    gdb_output = _run_gdb('p sizeof(%s)' % typename)

    if len(gdb_output) == 0:
        # Sometimes gdb fails at interpreting a typedef; try again with a struct prefix
        gdb_output = _run_gdb('p sizeof(struct %s)' % typename)

    if len(gdb_output) == 0:
        raise Exception("Failed to get sizeof for type %s" % typename)

    # Looks like "$1 = 44", we want the "44"
    return int(gdb_output.split()[2])


def dump_sizes(json_filename, elf_filename):
    with open(json_filename) as f:
        data = json.load(f)

    all_types = get_types(data)
    fmt_str = "%30s %10s %10s %10s %16s %16s %16s %s"

    print fmt_str % ("Type", "sizeof()", "Size 2.x", "Size 3.x",
                     "direct padding", "total padding", "calculated size", "dependencies")

    for t in all_types:
        type_sizeof = _get_sizeof_type(elf_filename, t.name)

        calculated_size = type_sizeof + t.get_total_3x_padding(all_types)
        if not t.size_3x or calculated_size == t.size_3x:
            calculated_size_str = str(calculated_size)
        else:
            calculated_size_str = "%u <%u>" % (calculated_size, (calculated_size - t.size_3x))

        print fmt_str % (t.name, type_sizeof, t.size_2x, t.size_3x,
                         t.size_3x_direct_padding, t.get_total_3x_padding(all_types),
                         calculated_size_str, t.dependencies)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('--json', type=str, default='src/fw/applib/applib_malloc.json',
                        help="Specify the JSON file to use")
    parser.add_argument('--elf', type=str, default='build/src/fw/tintin_fw.elf',
                        help="Specify the ELF file to use")

    args = parser.parse_args()
    dump_sizes(args.json, args.elf)

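To make the padding computation above concrete, a small sketch with invented type entries (only 'name', 'size_3x_padding' and 'dependencies' matter here): a type with 8 bytes of direct 3.x padding depending on a type with 4 contributes 12 bytes total.

    # Hypothetical entries mirroring the applib_malloc.json schema read by ApplibType.
    types = [ApplibType({'name': 'Layer', 'size_3x_padding': 4}),
             ApplibType({'name': 'MenuLayer', 'size_3x_padding': 8,
                         'dependencies': ['Layer']})]
    print types[1].get_total_3x_padding(types)  # 8 direct + 4 from Layer -> 12
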
118
tools/applib_malloc.template.c
Normal file
@@ -0,0 +1,118 @@
/*
 * Copyright 2024 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//! @file ${filename}
//! This file is autogenerated by tools/applib_malloc.py


#include "kernel/pbl_malloc.h"
#include "kernel/pebble_tasks.h"
#include "mcu/privilege.h"
#include "process_management/process_manager.h"
#include "system/passert.h"


${includes}


typedef struct {
  size_t actual_size; //!< The size of the struct as it's currently implemented, without padding
  size_t size_2x; //!< The size the struct allocation should be for legacy2 apps
  size_t size_3x; //!< The size the struct allocation should be for 3.x apps
} ApplibTypeInfo;

typedef enum {
  ${applib_enum_types}
} ApplibType;


static const ApplibTypeInfo s_applib_malloc_types[] = {
  ${applib_malloc_types}
};


static size_t prv_find_size(int index) {
  const ApplibTypeInfo *type = &s_applib_malloc_types[index];

  if (mcu_state_is_thread_privileged()) {
    return type->actual_size;
  }

  if (process_manager_compiled_with_legacy2_sdk()) {
    return type->size_2x;
  }

  return type->size_3x;
}

static void* prv_malloc(size_t size, uintptr_t client_pc) {
  if (!size) {
    return NULL;
  }

#if defined(MALLOC_INSTRUMENTATION)
  void *result = task_malloc_with_pc(size, client_pc);
#else
  void *result = task_malloc(size);
#endif

  if (!result && mcu_state_is_thread_privileged()) {
    // We want to trip an assert if our malloc failed and we're not running a 3rd party app.
    PBL_CROAK_OOM(size, client_pc, task_heap_get_for_current_task());
  }

  return result;
}

static void* prv_zalloc(size_t size, uintptr_t client_pc) {
  void *result = prv_malloc(size, client_pc);

  if (result) {
    memset(result, 0, size);
  }

  return result;
}

void* applib_malloc(size_t size) {
#if defined(MALLOC_INSTRUMENTATION)
  register uintptr_t lr __asm("lr");
  uintptr_t saved_lr = lr;
  return prv_malloc(size, saved_lr);
#else
  return prv_malloc(size, 0);
#endif
}

void* applib_zalloc(size_t size) {
#if defined(MALLOC_INSTRUMENTATION)
  register uintptr_t lr __asm("lr");
  uintptr_t saved_lr = lr;
  return prv_zalloc(size, saved_lr);
#else
  return prv_zalloc(size, 0);
#endif
}

void applib_free(void *ptr) {
#if defined(MALLOC_INSTRUMENTATION)
  register uintptr_t lr __asm("lr");
  uintptr_t saved_lr = lr;
  task_free_with_pc(ptr, saved_lr);
#else
  task_free(ptr);
#endif
}

44
tools/applib_malloc.template.h
Normal file
@@ -0,0 +1,44 @@
/*
 * Copyright 2024 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#pragma once

#include <stddef.h>

//! @file ${filename}
//! This file is autogenerated by tools/applib_malloc.py


//! Allocate memory for applib for a given size
void* applib_malloc(size_t size);

//! Allocate memory for applib for a given size and zero the buffer
void* applib_zalloc(size_t size);

//! Deallocate memory previously allocated using applib_malloc
void applib_free(void *ptr);

//! Allocate the appropriate amount of memory for the given type.
#define applib_type_malloc(malloc_type) \
  _applib_type_malloc_## malloc_type()

//! Allocate the appropriate amount of memory for the given type.
#define applib_type_zalloc(malloc_type) \
  _applib_type_zalloc_## malloc_type()

//! Get the size we should allocate for the given type.
#define applib_type_size(malloc_type) \
  _applib_type_size_## malloc_type()

53
tools/applib_malloc_type.template.c
Normal file
@@ -0,0 +1,53 @@
/*
 * Copyright 2024 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// ${name}
///////////////////////////////////////////

#if (${check_size} && ${min_sdk} <= 2 && ${size_2x} > 0)
_Static_assert(sizeof(${name}) <= ${size_2x}, "<${name}> is too large for 2.x");
#endif

#if (${check_size} && ${size_3x} > 0)
_Static_assert(sizeof(${name}) <= ${size_3x}, "<${name}> is too large for 3.x");
_Static_assert(sizeof(${name}) + ${total_3x_padding} == ${size_3x},
               "<${name}> is incorrectly padded for 3.x, "
               "total padding: ${total_3x_padding} total size: ${size_3x}");
#endif

void *_applib_type_malloc_${name}(void) {
#if defined(MALLOC_INSTRUMENTATION)
  register uintptr_t lr __asm("lr");
  const uintptr_t saved_lr = lr;
#else
  const uintptr_t saved_lr = 0;
#endif
  return prv_malloc(prv_find_size(ApplibType_${name}), saved_lr);
}

void *_applib_type_zalloc_${name}(void) {
#if defined(MALLOC_INSTRUMENTATION)
  register uintptr_t lr __asm("lr");
  const uintptr_t saved_lr = lr;
#else
  const uintptr_t saved_lr = 0;
#endif
  return prv_zalloc(prv_find_size(ApplibType_${name}), saved_lr);
}

size_t _applib_type_size_${name}(void) {
  return prv_find_size(ApplibType_${name});
}

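To see how the placeholders in the template above get filled, a sketch using the same string.Template mechanism as write_template() in tools/applib_malloc.py (the substitution values are hypothetical):

    import string
    with open('tools/applib_malloc_type.template.c') as f:
        template = string.Template(f.read())
    # Renders the _Static_asserts and accessor functions for a type named 'Layer'.
    print template.safe_substitute(name='Layer', check_size=1, min_sdk=3,
                                   size_2x=44, size_3x=48, total_3x_padding=4)
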
19
tools/applib_malloc_type.template.h
Normal file
@@ -0,0 +1,19 @@
/*
 * Copyright 2024 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

extern void* _applib_type_malloc_${name}(void);
extern void* _applib_type_zalloc_${name}(void);
extern size_t _applib_type_size_${name}(void);

1
tools/arc/config/.phutil_module_cache
Normal file
@@ -0,0 +1 @@
{"__symbol_cache_version__":11,"fb880a119e72c5d947d16a216246686c":{"have":{"class":{"PebbleArcanistConfiguration":920}},"need":{"function":{"idx":1373,"id":1472,"pht":3316,"phutil_passthru":3404,"head":4161},"class":{"ArcanistConfiguration":956,"PhutilClassMapQuery":1479,"ArcanistUsageException":5251,"PhutilEditDistanceMatrix":6064,"ArcanistAliasWorkflow":2800},"class\/interface":{"ArcanistWorkflow":1766,"ArcanistConfigurationManager":2194,"PhutilConsole":2251,"PhutilEditDistanceMatrix":6868}},"xmap":{"PebbleArcanistConfiguration":["ArcanistConfiguration"]}}}

17
tools/arc/config/__phutil_library_init__.php
Normal file
@@ -0,0 +1,17 @@
<?php
// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.


phutil_register_library('pebble-arc-config', __FILE__);

32
tools/arc/config/__phutil_library_map__.php
Normal file
@@ -0,0 +1,32 @@
<?php
// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.


/**
 * This file is automatically generated. Use 'arc liberate' to rebuild it.
 *
 * @generated
 * @phutil-library-version 2
 */
phutil_register_library_map(array(
  '__library_version__' => 2,
  'class' => array(
    'PebbleArcanistConfiguration' => 'pebble/PebbleArcanistConfiguration.php',
  ),
  'function' => array(),
  'xmap' => array(
    'PebbleArcanistConfiguration' => 'ArcanistConfiguration',
  ),
));

73
tools/arc/config/pebble/PebbleArcanistConfiguration.php
Normal file
@@ -0,0 +1,73 @@
<?php
// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.


/**
 * Runtime workflow configuration. In Arcanist, commands you type like
 * "arc diff" or "arc lint" are called "workflows". This class allows you to add
 * new workflows (and extend existing workflows) by subclassing it and then
 * pointing to your subclass in your project configuration.
 *
 * When specified as the **arcanist_configuration** class in your project's
 * ##.arcconfig##, your subclass will be instantiated (instead of this class)
 * and be able to handle all the method calls. In particular, you can:
 *
 *   - create, replace, or disable workflows by overriding buildWorkflow()
 *     and buildAllWorkflows();
 *   - add additional steps before or after workflows run by overriding
 *     willRunWorkflow() or didRunWorkflow() or didAbortWorkflow(); and
 *   - add new flags to existing workflows by overriding
 *     getCustomArgumentsForCommand().
 *
 * @concrete-extensible
 */
class PebbleArcanistConfiguration extends ArcanistConfiguration {
  const TREESTATUS_URL = "https://treestatus.marlinspike.hq.getpebble.com/api/ci/state/TT-MC/master";
  const FAIL_WHALE = "
▄██████████████▄▐█▄▄▄▄█▌
██████▌▄▌▄▐▐▌███▌▀▀██▀▀
████▄█▌▄▌▄▐▐▌▀███▄▄█▌
▄▄▄▄▄██████████████▀
";

  /*
   * Implement the willRunWorkflow hook in order to check whether or not
   * master is green before allowing a diff to be landed
   */
  public function willRunWorkflow($command, ArcanistWorkflow $workflow) {
    if ($workflow->getWorkflowName() == "land") {
      $build_status_str = file_get_contents(self::TREESTATUS_URL);
      $build_status = json_decode($build_status_str);

      $console = PhutilConsole::getConsole();
      if ($build_status->is_open) {
        $console->writeOut(
          "**<bg:green> %s </bg>** %s\n",
          pht('Master OK!'),
          pht('Merging is allowed'));
        return;
      } else {
        $console->writeOut(
          "%s\n**<bg:red> %s </bg>** %s\n",
          pht(self::FAIL_WHALE),
          pht('Master Borked :('),
          pht('Don\'t land unless your diff fixes it!'));

        if (!$console->confirm(pht('Land revision anyways?'))) {
          throw new ArcanistUserAbortException();
        }
      }
    }
  }
}

8
tools/arc/linting/applib.rule
Normal file
@@ -0,0 +1,8 @@
<rule version="1">
  <pattern>\b(task_|app_)?[cmz]alloc(_check)?</pattern>
  <message>
    <id>applib_malloc</id>
    <severity>style</severity>
    <summary>For structures that are exported, we try to always use applib_malloc. Please consider using this paradigm. If you have any questions, consult Bradley Murray. He is great.</summary>
  </message>
</rule>

17
tools/arc/linting/linters/__phutil_library_init__.php
Normal file
@@ -0,0 +1,17 @@
<?php
// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.


phutil_register_library('linters', __FILE__);

38
tools/arc/linting/linters/__phutil_library_map__.php
Normal file
@@ -0,0 +1,38 @@
<?php
// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.


/**
 * This file is automatically generated. Use 'arc liberate' to rebuild it.
 * @generated
 * @phutil-library-version 2
 */

phutil_register_library_map(array(
  '__library_version__' => 2,
  'class' =>
  array(
    'PebbleCpplintLinter' => 'cpplint/PebbleCpplintLinter.php',
    'PebbleCppcheckLinter' => 'cppcheck/PebbleCppcheckLinter.php',
  ),
  'function' =>
  array(
  ),
  'xmap' =>
  array(
    'PebbleCpplintLinter' => 'ArcanistExternalLinter',
    'PebbleCppcheckLinter' => 'ArcanistExternalLinter',
  ),
));

134
tools/arc/linting/linters/cppcheck/PebbleCppcheckLinter.php
Normal file
@@ -0,0 +1,134 @@
<?php
// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.


/**
 * Uses Cppcheck to do basic checks in a C++ file.
 */
final class PebbleCppcheckLinter extends ArcanistExternalLinter {

  public function getInfoName() {
    return 'Cppcheck-pebble';
  }

  public function getInfoURI() {
    return 'http://cppcheck.sourceforge.net';
  }

  public function getInfoDescription() {
    return pht('Use `cppcheck` to perform static analysis on C/C++ code.');
  }

  public function getLinterName() {
    return 'cppcheck-pebble';
  }

  public function getLinterConfigurationName() {
    return 'cppcheck-pebble';
  }

  public function getDefaultBinary() {
    return 'cppcheck';
  }

  public function getVersion() {
    list($stdout) = execx('%C --version', $this->getExecutableCommand());

    $matches = array();
    $regex = '/^Cppcheck (?P<version>\d+\.\d+)$/';
    if (preg_match($regex, $stdout, $matches)) {
      return $matches['version'];
    } else {
      return false;
    }
  }

  public function getInstallInstructions() {
    return pht('Install Cppcheck using `apt-get install cppcheck` for Ubuntu'.
               ' or `brew install cppcheck` for Mac OS X');
  }

  protected function getMandatoryFlags() {
    return array(
      '--quiet',
      '--inline-suppr',
      '--xml',
      '--xml-version=2',
    );
  }

  protected function getDefaultFlags() {
    return array('-j2',
                 '--enable=performance,style,portability,information',
                 '--library=tools/arc/linting/tintin.cfg,std',
                 '--rule-file=tools/arc/linting/tintin.rule',
                 '--enable=all',
                 '--suppress=passedByValue',
                 '--suppress=selfAssignment',
                 '--suppress=toomanyconfigs',
                 '--suppress=uninitStructMember',
                 '--suppress=unnecessaryForwardDeclaration',
                 '--suppress=unusedFunction',
                 '--suppress=variableScope',
                 '--suppress=unusedStructMember',
                 '--suppress=varFuncNullUB',
                 '--suppress=ConfigurationNotChecked');
  }

  protected function getDefaultMessageSeverity($code) {
    return ArcanistLintSeverity::SEVERITY_WARNING;
  }

  protected function parseLinterOutput($path, $err, $stdout, $stderr) {
    $dom = new DOMDocument();
    $ok = @$dom->loadXML($stderr);

    if (!$ok) {
      return false;
    }

    $errors = $dom->getElementsByTagName('error');
    $messages = array();
    foreach ($errors as $error) {
      foreach ($error->getElementsByTagName('location') as $location) {
        $message = new ArcanistLintMessage();
        $message->setPath($location->getAttribute('file'));
        $message->setLine($location->getAttribute('line'));
        $message->setCode($error->getAttribute('id'));
        $message->setName($error->getAttribute('id'));
        $message->setDescription($error->getAttribute('msg'));

        $message->setSeverity($this->getLintMessageSeverity($error->getAttribute('id')));

        $messages[] = $message;
      }
    }

    return $messages;
  }

  protected function getLintCodeFromLinterConfigurationKey($code) {
    if (!preg_match('@^[a-z_]+$@', $code)) {
      throw new Exception(
        pht(
          'Unrecognized severity code "%s". Expected a valid cppcheck '.
          'severity code like "%s" or "%s".',
          $code,
          'unreadVariable',
          'memleak'));
    }
    return $code;
  }
}

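For reference, the --xml-version=2 report that parseLinterOutput() consumes arrives on stderr shaped roughly like this (the element and attribute names match what the parser reads; the concrete values are hypothetical):

    <results version="2">
      <errors>
        <error id="unreadVariable" msg="Variable 'x' is assigned a value that is never used.">
          <location file="src/foo.c" line="42"/>
        </error>
      </errors>
    </results>
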
105
tools/arc/linting/linters/cpplint/PebbleCpplintLinter.php
Normal file
@@ -0,0 +1,105 @@
<?php
// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.


/**
 * Uses Google's `cpplint.py` to check code.
 */
final class PebbleCpplintLinter extends ArcanistExternalLinter {

  public function getLinterName() {
    return 'cpplint-pebble';
  }

  public function getLinterConfigurationName() {
    return 'cpplint-pebble';
  }

  public function getDefaultBinary() {
    return "tools/arc/linting/linters/cpplint/cpplint.py";
  }

  public function getInstallInstructions() {
    return pht('Ask Tyler Hoffman to fix it.');
  }

  public function shouldExpectCommandErrors() {
    return true;
  }

  public function supportsReadDataFromStdin() {
    return true;
  }

  public function getReadDataFromStdinFilename() {
    return '-';
  }

  protected function getDefaultFlags() {
    return array();
  }

  protected function getDefaultMessageSeverity($code) {
    return ArcanistLintSeverity::SEVERITY_WARNING;
  }

  protected function parseLinterOutput($path, $err, $stdout, $stderr) {
    $lines = explode("\n", $stderr);

    $messages = array();
    foreach ($lines as $line) {
      $line = trim($line);
      $matches = null;
      $regex = '/(\d+):\s*(.*)\s*\[(.*)\] \[(\d+)\]$/';
      if (!preg_match($regex, $line, $matches)) {
        continue;
      }
      foreach ($matches as $key => $match) {
        $matches[$key] = trim($match);
      }

      $message = new ArcanistLintMessage();
      $message->setPath($path);
      $message->setLine($matches[1]);
      $message->setCode($matches[3]);
      $message->setName($matches[3]);
      $message->setDescription($matches[2]);
      $message->setSeverity($this->getLintMessageSeverity($matches[3]));

      $messages[] = $message;
    }

    if ($err && !$messages) {
      return false;
    }

    return $messages;
  }

  protected function getLintCodeFromLinterConfigurationKey($code) {
    if (!preg_match('@^[a-z_]+/[a-z_]+$@', $code)) {
      throw new Exception(
        pht(
          'Unrecognized lint message code "%s". Expected a valid cpplint '.
          'lint code like "%s" or "%s".',
          $code,
          'build/include_order',
          'whitespace/braces'));
    }

    return $code;
  }

}

142
tools/arc/linting/linters/fw_linter.py
Executable file
@@ -0,0 +1,142 @@
#!/usr/bin/env python
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


import re

# FW linters for tintin in python!
#
# Adding a new linter is simple:
# 1) Subclass FwLinter
# 2) Define found_lint_error(self, filename, line) and return True iff an error is found on the
#    line passed in
# 3) Define SEVERITY, NAME, MESSAGE as documented here:
#    https://secure.phabricator.com/book/phabricator/article/arcanist_lint_script_and_regex/


class FwLinter(object):
    def construct_arcanist_error_string(self, severity, name, message, filename, line_num):
        return '|'.join([severity, name, message, filename, line_num])

    def handle_lint_error(self, filename, line, line_num):
        """ Responsible for communicating the lint error to arcanist. Today, this just involves
        printing the message because 'arc lint' monitors stdout """
        print self.construct_arcanist_error_string(self.SEVERITY, self.NAME, self.MESSAGE,
                                                   filename, str(line_num))

    def found_lint_error(self, filename, line):
        """ Given a line, returns True if a lint error is found on the line and False otherwise """
        raise NotImplementedError


#
# FwLinter Subclasses
#

class TodoFixmeLinter(FwLinter):
    SEVERITY = "ADVICE"
    NAME = "TODO/FIXME"
    MESSAGE = "TODO/FIXME Found. Just letting you know"

    jira_ticket_id_regex = re.compile(r'PBL-\d+', re.IGNORECASE)

    def found_lint_error(self, filename, line):
        line_lowercase = line.lower()
        return 'todo' in line_lowercase or 'fixme' in line_lowercase

    def handle_lint_error(self, filename, line, line_num):
        message = self.MESSAGE
        # If we find a JIRA ticket ID in the line, add the full JIRA URL to the message
        jira_matches = self.jira_ticket_id_regex.findall(line)
        if jira_matches:
            jira_ticket_id = jira_matches[0]
            jira_base_url = 'https://pebbletechnology.atlassian.net/browse/'
            jira_url = jira_base_url + jira_ticket_id
            message = ' '.join([message, jira_url])
        print self.construct_arcanist_error_string(self.SEVERITY, self.NAME, message, filename,
                                                   str(line_num))


class UndefinedAttributeLinter(FwLinter):
    SEVERITY = "ERROR"
    NAME = "Undefined Attribute"
    MESSAGE = "yo, you need to include util/attributes.h if you want to PACK stuff"

    attribute_inc_regex = re.compile(r'(^#include\s+[<\"]util/attributes.h[>\"])')

    def __init__(self):
        self.include_found = False

    def found_lint_error(self, filename, line):
        if self.attribute_inc_regex.findall(line) or '#define PACKED' in line:
            self.include_found = True
            return False
        elif ' PACKED ' in line and not self.include_found:
            return True
        return False


class StaticFuncFormatLinter(FwLinter):
    SEVERITY = "WARNING"
    NAME = "Static Function Format Error"
    MESSAGE = "umm, you forgot to add 'prv_' or mark this function as 'static'"

    func_proto_regex = re.compile(r'^(\w+)\W?.*\W(\w+\([a-zA-Z])')

    def found_lint_error(self, filename, line):
        # Ignore header files
        if (filename.endswith(".h")):
            return False

        matches = self.func_proto_regex.findall(line)

        if matches and len(matches[0]) == 2:
            groups = matches[0]
            func_starts_with_prv = groups[1].startswith('prv_')
            func_is_static = any(x in groups[0] for x in ['static', 'T_STATIC'])

            return ((func_is_static and not func_starts_with_prv) or
                    (func_starts_with_prv and not func_is_static))
        return False


class ColorFallbackDeprecatedMacroLinter(FwLinter):
    SEVERITY = "WARNING"
    NAME = "COLOR_FALLBACK() Deprecated Macro"
    MESSAGE = "The macro `COLOR_FALLBACK()` has been deprecated for internal firmware use. " \
              "Use the equivalent `PBL_IF_COLOR_ELSE()` macro instead. Unfortunately, we can't " \
              "simply remove `COLOR_FALLBACK()` from the firmware because it's exported in the SDK."

    def found_lint_error(self, filename, line):
        return 'COLOR_FALLBACK' in line


#
# Code to run our FW linters
#


def lint(filename):
    linters = [linter() for linter in FwLinter.__subclasses__()]
    with open(filename) as f:
        for i, line in enumerate(f.readlines()):
            line_num = i + 1
            for linter in linters:
                if linter.found_lint_error(filename, line):
                    linter.handle_lint_error(filename, line, line_num)


if __name__ == '__main__':
    import sys
    filename = sys.argv[1]
    lint(filename)

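A short illustration of the pipe-delimited protocol handle_lint_error() prints for arcanist (the file name and line number are hypothetical):

    # Given the line 'static void do_thing(void) {' at line 12 of foo.c,
    # StaticFuncFormatLinter would emit:
    #   WARNING|Static Function Format Error|umm, you forgot to add 'prv_' or mark this function as 'static'|foo.c|12
    lint('foo.c')
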
94
tools/arc/linting/tintin.cfg
Normal file
@@ -0,0 +1,94 @@
<?xml version="1.0"?>
<def>

  <!-- pbl_malloc.h -->

  <memory>
    <alloc>malloc</alloc>
    <dealloc>free</dealloc>
  </memory>
  <memory>
    <alloc>kernel_malloc</alloc>
    <alloc>kernel_malloc_check</alloc>
    <dealloc>kernel_free</dealloc>
  </memory>
  <memory>
    <alloc>app_malloc</alloc>
    <alloc>app_malloc_check</alloc>
    <dealloc>app_free</dealloc>
  </memory>
  <memory>
    <alloc>task_malloc</alloc>
    <alloc>task_malloc_check</alloc>
    <dealloc>task_free</dealloc>
  </memory>
  <memory>
    <alloc>applib_type_malloc</alloc>
    <alloc>applib_malloc</alloc>
    <dealloc>applib_free</dealloc>
  </memory>

  <!-- Layers -->

  <resource>
    <alloc>action_bar_layer_create</alloc>
    <dealloc>action_bar_layer_destroy</dealloc>
  </resource>
  <resource>
    <alloc>bitmap_layer_create</alloc>
    <dealloc>bitmap_layer_destroy</dealloc>
  </resource>
  <resource>
    <alloc>inverter_layer_create</alloc>
    <dealloc>inverter_layer_destroy</dealloc>
  </resource>
  <resource>
    <alloc>menu_layer_create</alloc>
    <dealloc>menu_layer_destroy</dealloc>
  </resource>
  <resource>
    <alloc>rot_bitmap_layer_create</alloc>
    <dealloc>rot_bitmap_layer_destroy</dealloc>
  </resource>
  <resource>
    <alloc>scroll_layer_create</alloc>
    <dealloc>scroll_layer_destroy</dealloc>
  </resource>
  <resource>
    <alloc>simple_menu_layer_create</alloc>
    <dealloc>simple_menu_layer_destroy</dealloc>
  </resource>
  <resource>
    <alloc>text_layer_create</alloc>
    <dealloc>text_layer_destroy</dealloc>
  </resource>

  <!-- Animations -->

  <resource>
    <alloc>property_animation_create</alloc>
    <dealloc>property_animation_destroy</dealloc>
  </resource>

  <!-- Windows -->

  <resource>
    <alloc>window_create</alloc>
    <dealloc>window_destroy</dealloc>
  </resource>

  <!-- Graphics -->

  <resource>
    <alloc>gpath_create</alloc>
    <dealloc>gpath_destroy</dealloc>
  </resource>

  <!-- Weather -->

  <memory>
    <alloc>watch_app_prefs_get_weather</alloc>
    <dealloc>watch_app_prefs_destroy_weather</dealloc>
  </memory>

</def>

43
tools/arc/linting/tintin.rule
Normal file
@@ -0,0 +1,43 @@
<?xml version="1.0"?>

<rule version="1">
  <pattern>PBL_ASSERT_OOM</pattern>
  <message>
    <id>pbl_assert_oom</id>
    <severity>style</severity>
    <summary>PBL_ASSERT_OOM is a large macro. Consider using the *_malloc_check versions or checking for NULL instead.</summary>
  </message>
</rule>

<rule version="1">
  <pattern>snprintf</pattern>
  <message>
    <id>snprintf</id>
    <severity>style</severity>
    <summary>The function 'snprintf' uses up a lot of our precious stack space. A simpler set of strcpy/strcat functions might be a better approach. If it is not being used deep in the stack, continue on your merry way.</summary>
  </message>
</rule>

<rule version="1">
  <pattern>psleep</pattern>
  <message>
    <id>psleep</id>
    <severity>style</severity>
    <summary>The function 'psleep' can delay for less than the interval specified; in fact, psleep(1) can wind up taking no time at all. (Check out sleep.h if you don't believe me!) Also note that psleep(0) forces a reschedule but will starve low-priority tasks if you call it in a while loop. So double check your delay and carry on!</summary>
  </message>
</rule>

<rule version="1">
  <pattern>\b(\w+_)?malloc(_check)?\b</pattern>
  <message>
    <id>malloc</id>
    <severity>style</severity>
    <summary>The function 'malloc' returns uninitialized memory, while callers often (sometimes indirectly) assume that the allocated memory is filled with bytes of value zero. If in doubt, use `zalloc` instead. In most cases, the performance impact of this extra processing is negligible.</summary>
  </message>
</rule>


<!-- More can be added here -->

191
tools/audio_recording.py
Normal file
@@ -0,0 +1,191 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import array
import glob
import serial
import struct
import sys

import prompt
import stm32_crc as crc
from hdlc import HDLCDecoder
from serial_port_wrapper import SerialPortWrapper

'''
This script will invoke the microphone test command on snowy, read the
PCM encoded audio data from the serial port and store it to a .wav file,
which can be played back from any audio player. It can be invoked via
the './waf record' command or by running the script from the command line.

The output sample rate and sample size when recording using the waf script
can be configured by modifying the 'freq' and 'sample_size' variables,
respectively.
'''

FREQ = 8000  # Default output sample rate from microphone (8000 or 16000)
SAMPLE_SIZE = 2  # Default sample size: 1 for 8-bit data, 2 for 16-bit data samples
                 # (must match what is output by the watch)
seconds = 1


def store_wav(samples, filename, sample_size, freq):
    size = len(samples) * sample_size
    width = 8 * sample_size
    byte_rate = freq * sample_size

    wav = "RIFF"
    wav += struct.pack('i', size + 44)  # file size
    wav += "WAVE"
    wav += "fmt "
    wav += struct.pack('i', 16)  # fmt data length
    wav += struct.pack('h', 1)  # audio format (1 = PCM)
    wav += struct.pack('h', 1)  # channels
    wav += struct.pack('i', freq)  # sample rate
    wav += struct.pack('i', byte_rate)  # byte rate
    wav += struct.pack('h', sample_size)  # block alignment (bytes per block)
    wav += struct.pack('h', width)  # bits per sample
    wav += "data"
    wav += struct.pack('i', size)  # data size

    for sample in samples:
        wav += struct.pack('B' if sample_size == 1 else 'H', sample)

    with open(filename, 'wb') as f:
        f.write(wav)
    print '{0} samples packed into {1}'.format(len(samples), filename)


def receive_hdlc_data(s, sample_size):
    count = {'i': 0}

    decoder = HDLCDecoder()
    data = []
    while True:
        raw_data = s.read(1000)
        if len(raw_data) == 0:
            break
        decoder.write(raw_data)
        for frame in iter(decoder.get_frame, None):
            count['i'] += 1
            if len(frame) > 4:
                frame = bytearray(frame)
                frame_crc = frame[-4] | (frame[-3] << 8) | (frame[-2] << 16) | (frame[-1] << 24)
                if crc.crc32(array.array('B', frame[:-4]).tostring()) == frame_crc:
                    data.extend(frame[:-4])
                else:
                    print count['i']

    print 'total frames received =', str(count['i'])
    samples = []
    for i in range(0, len(data), sample_size):
        try:
            d = 0
            if sample_size == 1:
                d = data[i]
            elif sample_size == 2:
                d = data[i] | (data[i + 1] << 8)
            samples.append(d)
        except:
            print "conversion failed on word {0}".format(i/sample_size)

    return samples


def open_serial_port(tty, baud_rate):
    s = serial.serial_for_url(tty, baud_rate, timeout=2)
    if s is not None:
        print 'opened', tty, 'at', str(baud_rate), 'bps'
    else:
        print 'failed to open', tty
    return s


def record_from_tty(tty_prompt, tty_accessory, t=seconds, filename='test.wav',
                    sample_size=SAMPLE_SIZE, sample_rate=FREQ, volume=100, accessory=False):
    if tty_prompt == tty_accessory:
        # sending commands via accessory connector, so set baud rate correctly
        s = SerialPortWrapper(tty_prompt, baud_rate=115200)
    else:
        s = SerialPortWrapper(tty_prompt)

    try:
        prompt.go_to_prompt(s)
        print 'record {0}-bit audio data for {1}s at {2}Hz (~{3} samples)'.format(
            '8' if sample_size == 1 else '16', t, str(sample_rate), t * sample_rate)
        cmd = 'mic start {0} {1} {2} {3}'.format(t, '8' if sample_size == 1 else '16', sample_rate,
                                                 volume)
        prompt.issue_command(s, cmd)
        s.close()

        if (sample_size == 2) and (sample_rate == 16000):
            s = open_serial_port(tty_accessory, 460800)
        elif (sample_size == 2) or (sample_rate == 16000):
            s = open_serial_port(tty_accessory, 230400)
        else:
            s = open_serial_port(tty_accessory, 115200)

        samples = receive_hdlc_data(s, sample_size)
        print '{0} samples read'.format(len(samples))
        if len(samples) != (sample_rate * t):
            print 'Not enough samples received ({0}/{1})'.format(len(samples), (sample_rate * t))
        else:
            print 'Output file: ' + filename
            store_wav(samples, filename, sample_size, sample_rate)

    finally:
        s.close()


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('--tty_prompt', type=str,
                        help="Prompt tty (e.g. /dev/cu.usbserial-xxxxxxxB or /dev/ttyUSB0). If "
                             "this is specified, then the accessory port must be specified too.")
    parser.add_argument('--tty_accessory', type=str,
                        help="Accessory port tty (e.g. /dev/cu.usbserial-xxxxxxxB or /dev/ttyUSB0). "
                             "If the commands are being sent via the accessory port, this, and "
                             "only this, tty must be specified.")
    parser.add_argument('-o', '--output', type=str, default='test.wav',
                        help="Output file name. Default: 'test.wav'")
    parser.add_argument('-d', '--duration', type=int, default=seconds,
                        help="Number of seconds of audio that will be recorded. Default: 1s, "
                             "Max: 60s")
    parser.add_argument('-w', '--width', type=int, choices=[8, 16], default=16,
                        help="Sample data width (8- or 16-bit). Default: 16-bit")
    parser.add_argument('-r', '--rate', type=int, choices=[8000, 16000], default=8000,
                        help="Sample rate in Hz. Default: 8000")
    parser.add_argument('-v', '--volume', type=int, default=100,
                        help="Volume (1 - 1000). Default: 100")
    args = parser.parse_args()

    if args.tty_accessory and not args.tty_prompt:
        tty_accessory = args.tty_accessory
        tty_prompt = args.tty_accessory
    elif args.tty_prompt and not args.tty_accessory:
        raise Exception("If the prompt tty is specified, the accessory port tty must be specified "
                        "too!")
    elif not args.tty_prompt and not args.tty_accessory:
        import pebble_tty

        tty_prompt = pebble_tty.find_dbgserial_tty()
        tty_accessory = pebble_tty.find_accessory_tty()
        if not tty_prompt or not tty_accessory:
            raise Exception("Serial ports could not be resolved!")
    else:
        tty_accessory = args.tty_accessory
        tty_prompt = args.tty_prompt

    sample_size = args.width / 8
    record_from_tty(tty_prompt, tty_accessory, args.duration, args.output, sample_size,
                    args.rate, min(args.volume, 1000))

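A worked check of the WAV header arithmetic in store_wav() for the defaults above (8000 Hz, 16-bit samples, 1 second of audio):

    freq, sample_size, t = 8000, 2, 1  # FREQ, SAMPLE_SIZE, seconds
    n_samples = freq * t               # 8000 samples expected from the watch
    size = n_samples * sample_size     # 16000 bytes in the 'data' chunk
    byte_rate = freq * sample_size     # 16000 bytes/s in the 'byte rate' field
    width = 8 * sample_size            # 16 bits per sample
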
40
tools/bamboo_deploy.sh
Executable file
@@ -0,0 +1,40 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Usage:
#   bamboo_deploy.sh environment commit [staging?]
#   e.g. bamboo_deploy.sh porksmoothie v3.10-beta2
#   e.g. bamboo_deploy.sh release-v3.8 v3.10 1

bucket=pebblefw
if [ "$3" = "1" ]; then
    bucket=$bucket-staging
fi
stage=$1
notes="build/firmware/release-notes.txt"

cd tintin && git checkout $2 && cd ../

files=$(ls build/firmware/*.pbz)
if [ "$files" = "" ]; then
    echo 'No .pbz files found'
    exit 1
fi
for bundle in $files; do
    python tintin/tools/deploy_pbz_to_pebblefw.py \
        --bucket $bucket \
        --stage $stage \
        --notes $notes \
        $bundle
done

240
tools/battery_curve.py
Executable file
|
@ -0,0 +1,240 @@
|
|||
#!/usr/bin/env python
|
||||
# Copyright 2024 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import argparse
|
||||
import csv
|
||||
import glob
|
||||
import numpy
|
||||
import pylab
|
||||
import re
|
||||
import scipy.interpolate
|
||||
import scipy.signal
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
from pprint import pprint
|
||||
|
||||
VOLTAGE_SUFFIX = '_voltage'
|
||||
CURRENT_SUFFIX = '_VBAT'
|
||||
|
||||
def get_pair(voltage_path):
|
||||
return [voltage_path, re.sub('(\w+)%s.csv' % VOLTAGE_SUFFIX,
|
||||
'\\1%s.csv' % CURRENT_SUFFIX,
|
||||
voltage_path)]
|
||||
|
||||
def scrape_path(path, paths, output):
|
||||
if path.endswith('%s.csv' % VOLTAGE_SUFFIX):
|
||||
pair = get_pair(path)
|
||||
output.append(pair)
|
||||
paths.remove(pair[0])
|
||||
elif not path.endswith('%s.csv' % CURRENT_SUFFIX):
|
||||
output.append([path, ''])
|
||||
|
||||
def scrape_directory(output, path=''):
|
||||
paths = []
|
||||
csv_files = glob.glob('%s*.csv' % path)
|
||||
for path in csv_files:
|
||||
scrape_path(path, csv_files, output)
|
||||
return paths
|
||||
|
||||
def parse_paths(paths):
|
||||
parsed_paths = []
|
||||
if not paths or paths[0] == 'None':
|
||||
print 'No input provided: Grabbing all CSV pairs in current directory'
|
||||
scrape_directory(parsed_paths)
|
||||
else:
|
||||
for path in paths:
|
||||
if not path.endswith('.csv'):
|
||||
scrape_directory(parsed_paths, path + '/')
|
||||
else:
|
||||
scrape_path(path, paths, parsed_paths)
|
||||
if not len(parsed_paths):
|
||||
raise Exception('Needs data!')
|
||||
|
||||
return parsed_paths
|
||||
|
||||
def extract_csv(path):
|
||||
with open(path, 'rU') as csv_file:
|
||||
reader = csv.DictReader(csv_file, skipinitialspace=True)
|
||||
result = defaultdict(list)
|
||||
for row in reader:
|
||||
for key, value in row.items():
|
||||
result[key].append(float(value))
|
||||
|
||||
result = dict(result)
|
||||
return result
|
||||
|
||||
def get_data(voltage_path, current_path):
|
||||
result = extract_csv(voltage_path)
|
||||
|
||||
if not current_path:
|
||||
return result
|
||||
|
||||
current = extract_csv(current_path)
|
||||
|
||||
if current['current'][1] < 0:
|
||||
current['current'] = numpy.array(current['current']) * -1
|
||||
|
||||
result['current'] = numpy.interp(result['time'], current['time'], current['current'])
|
||||
return result
|
||||
|
||||
def round_to(vals, base=5):
|
||||
return [[data[0], int(base * round(float(data[1])/base))] for data in vals]
|
||||
|
||||
def get_curve(data, range=[100,90,80,70,60,50,40,30,20,10,5,2,0], is_discharge=False, capacity=0.0, graph=False):
|
||||
if 'current' in data:
|
||||
data['current'] = numpy.array(data['current'])
|
||||
|
||||
# Eliminate all current <= 0 mA -> Drawing current
|
||||
current_threshold = data['current'] > 0
|
||||
time = numpy.array(data['time'])[current_threshold]
|
||||
|
||||
# Use mAh as scale
|
||||
d_time = (numpy.insert(numpy.diff(time), 0, 0) / 3600.)
|
||||
mah = numpy.cumsum(d_time * data['current'][current_threshold])
|
||||
|
||||
print 'mAh: %d' % mah[-1]
|
||||
|
||||
# Convert mAh to SOC
|
||||
offset = 0
|
||||
if not capacity:
|
||||
capacity = mah[-1]
|
||||
elif not is_discharge:
|
||||
        # If the battery's capacity is given, assume the charge log ends at 100% SOC.
|
||||
offset = capacity - mah[-1]
|
||||
scale = (mah + offset) / capacity * 100
|
||||
else:
|
||||
print 'Using time as scale!'
|
||||
scale = (numpy.array(data['time'])/data['time'][-1]*100)
|
||||
|
||||
voltage = data['voltage']
|
||||
cutoff = None
|
||||
|
||||
if is_discharge:
|
||||
# If discharging, 100% is at 0s so we need to reverse the arrays.
|
||||
scale = 100 - scale
|
||||
scale = scale[::-1]
|
||||
voltage = voltage[::-1]
|
||||
data['voltage'] = data['voltage'][::-1]
|
||||
else:
|
||||
# Eliminate all values after the last maximum value in the last 30%: Charging complete
|
||||
d_voltage = numpy.diff(voltage)[-int(len(voltage)*0.30):]
|
||||
reverse = d_voltage[::-1]
|
||||
reverse_drop_index = reverse.argmin()
|
||||
drop_index = len(voltage) - reverse_drop_index - 1
|
||||
        # If the voltage drops by more than 10mV within 3 samples of reaching 100% SOC,
|
||||
        # the drop was most likely caused by the termination of charging.
|
||||
if reverse[reverse_drop_index] < -10 and abs(drop_index - len(scale)) <= 3:
|
||||
voltage = voltage[:drop_index]
|
||||
scale = scale[:drop_index]
|
||||
cutoff = data['time'][drop_index]
|
||||
print 'Detected end of charge! Dropping values past %ds @ %dmV.' % (data['time'][drop_index], voltage[-1])
|
||||
|
||||
print 'Scale starting @ %.2f%%, ending @ %.2f%%' % (scale[0], scale[-1])
|
||||
|
||||
window = 51
|
||||
if len(data['voltage']) < 51:
|
||||
window = 5
|
||||
|
||||
avg = scipy.signal.savgol_filter(voltage, window, 3)
|
||||
threshold = scale <= 100.0
|
||||
voltage = scipy.interpolate.InterpolatedUnivariateSpline(scale[threshold], avg[threshold], k=1)
|
||||
curve = numpy.array([range, voltage(range)])
|
||||
|
||||
if graph:
|
||||
# Plot voltage, current, and SOC on a graph.
|
||||
fig = pylab.figure(tight_layout=True)
|
||||
fig.subplots_adjust(right=0.85)
|
||||
axis = fig.add_subplot(111)
|
||||
axis.set_xlabel("Time (s)")
|
||||
axis.set_ylabel("Voltage (mV)")
|
||||
axis.plot(data['time'], data['voltage'], '-r')
|
||||
if 'current' in data:
|
||||
current = axis.twinx()
|
||||
current.set_ylabel("Current (mA)")
|
||||
current.plot(data['time'], data['current'], '-g')
|
||||
soc = axis.twinx()
|
||||
soc.set_ylabel("SOC (%)")
|
||||
soc.spines['right'].set_position(('axes', 1.1))
|
||||
soc.set_ylim(0, 100)
|
||||
soc.plot(data['time'][:len(scale)], scale, '-b')
|
||||
if cutoff:
|
||||
axis.axvline(cutoff, c='k', ls='--')
|
||||
axis.set_xlim(data['time'][0], data['time'][-1])
|
||||
if is_discharge:
|
||||
axis.invert_xaxis()
|
||||
fig.show()
|
||||
|
||||
return curve.transpose().astype('int16')
|
||||
|
||||
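# Average the curves computed from every input pair and round voltages to 5mV steps.
|
||||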
def get_avg_curve(paths, range, is_discharge, graph, capacity):
|
||||
parsed_paths = parse_paths(paths)
|
||||
|
||||
pprint(parsed_paths)
|
||||
|
||||
    avg = numpy.zeros((len(range), 2))
|
||||
for voltage_csv, current_csv in parsed_paths:
|
||||
curve = get_curve(get_data(voltage_csv, current_csv),
|
||||
range,
|
||||
is_discharge,
|
||||
capacity,
|
||||
graph)
|
||||
if graph:
|
||||
fig = pylab.figure(0, tight_layout=True)
|
||||
fig.gca().plot(curve.transpose()[0], curve.transpose()[1], '--')
|
||||
avg += curve
|
||||
avg /= len(parsed_paths)
|
||||
|
||||
if graph:
|
||||
fig.gca().plot(avg.transpose()[0], avg.transpose()[1])
|
||||
fig.gca().set_ylim(3200, 4500)
|
||||
if is_discharge:
|
||||
fig.gca().invert_xaxis()
|
||||
fig.show()
|
||||
return round_to(avg.astype('int16').tolist())
|
||||
|
||||
def main(argv):
|
||||
""" Generate a power curve. """
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('-i', '--input', default='None',
|
||||
help='The data as pairs of files: Voltage,Current')
|
||||
parser.add_argument('-o', '--output',
|
||||
help='Where should we write the results?')
|
||||
parser.add_argument('-c', '--curve', default='100,90,80,70,60,50,40,30,20,10,5,2,0',
|
||||
help='What values of the curve do we calculate?')
|
||||
parser.add_argument('-d', '--discharge', action='store_true',
|
||||
help='Is this discharge data?')
|
||||
parser.add_argument('-g', '--graph', action='store_true',
|
||||
help='Should we show a graph?')
|
||||
parser.add_argument('-bc', '--capacity', default='0',
|
||||
                        help='What is the battery\'s capacity? (in mAh)')
|
||||
|
||||
args = parser.parse_args()
|
||||
curve = get_avg_curve(args.input.split(','),
|
||||
[int(val) for val in args.curve.split(',')],
|
||||
args.discharge,
|
||||
args.graph,
|
||||
float(args.capacity))
|
||||
|
||||
output = ''
|
||||
for percent, voltage in reversed(curve):
|
||||
output += '\t{%-4s%5d},\n'.expandtabs(2) % (str(percent) + ',', voltage)
|
||||
|
||||
# Get rid of extra newline and comma
|
||||
print output[:-2]
|
||||
raw_input('Press enter to continue')
|
||||
|
||||
if __name__ == '__main__':
|
||||
main(sys.argv[1:])
|
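A minimal sketch of the coulomb-counting step that get_curve() performs above, with made-up sample data (only numpy is assumed):

    import numpy

    # Four samples, one per minute, charging at a constant 100 mA.
    time = numpy.array([0., 60., 120., 180.])        # seconds
    current = numpy.array([100., 100., 100., 100.])  # mA
    d_time = numpy.insert(numpy.diff(time), 0, 0) / 3600.  # hours elapsed per sample
    mah = numpy.cumsum(d_time * current)             # charge accumulated so far
    print(mah[-1])  # -> 5.0 mAh after three minutes at 100 mA

Dividing the accumulated mAh by the battery capacity is what turns the log's time axis into the 0-100% SOC scale that the curve is sampled on.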
326
tools/binutils.py
Normal file
|
@ -0,0 +1,326 @@
|
|||
# Copyright 2024 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os.path
|
||||
import re
|
||||
import sh
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
|
||||
NM_LINE_PATTERN = re.compile(r"""([0-9a-f]+)\s+ # address
|
||||
([0-9a-f]+)\s+ # size
|
||||
([dDbBtTrR])\s+ # section type
|
||||
(\S+) # name
|
||||
\s*((\S+)\:([0-9]+))?$ # filename + line
|
||||
""", flags=re.VERBOSE)
|
||||
|
||||
|
||||
class Symbol(object):
|
||||
def __init__(self, name, size):
|
||||
self.name = name
|
||||
self.size = size
|
||||
|
||||
def __str__(self):
|
||||
return '<Symbol %s: %u>' % (self.name, self.size)
|
||||
|
||||
|
||||
class FileInfo(object):
|
||||
def __init__(self, filename):
|
||||
self.filename = filename
|
||||
self.size = 0
|
||||
self.symbols = {}
|
||||
|
||||
def add_entry(self, symbol_name, size):
|
||||
if symbol_name in self.symbols:
|
||||
return
|
||||
|
||||
self.size += size
|
||||
self.symbols[symbol_name] = Symbol(symbol_name, size)
|
||||
|
||||
def remove_entry(self, symbol_name):
|
||||
result = self.symbols.pop(symbol_name, None)
|
||||
if result is not None:
|
||||
self.size -= result.size
|
||||
return result
|
||||
|
||||
def pprint(self, verbose):
|
||||
print ' %s: size %u' % (self.filename, self.size)
|
||||
if verbose:
|
||||
l = sorted(self.symbols.itervalues(), key=lambda x: -x.size)
|
||||
for s in l:
|
||||
print ' %6u %-36s' % (s.size, s.name)
|
||||
|
||||
def __str__(self):
|
||||
return '<FileInfo %s: %u>' % (self.filename, self.size)
|
||||
|
||||
|
||||
class SectionInfo(object):
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
self.count = 0
|
||||
self.size = 0
|
||||
self.files = {}
|
||||
|
||||
def add_entry(self, name, filename, size):
|
||||
self.count += 1
|
||||
self.size += size
|
||||
|
||||
if filename not in self.files:
|
||||
self.files[filename] = FileInfo(filename)
|
||||
|
||||
self.files[filename].add_entry(name, size)
|
||||
|
||||
def remove_unknown_entry(self, name):
|
||||
if 'Unknown' not in self.files:
|
||||
return
|
||||
result = self.files['Unknown'].remove_entry(name)
|
||||
if result is not None:
|
||||
self.size -= result.size
|
||||
return result
|
||||
|
||||
def get_files(self):
|
||||
return self.files.values()
|
||||
|
||||
def pprint(self, summary, verbose):
|
||||
print '%s: count %u size %u' % (self.name, self.count, self.size)
|
||||
|
||||
if not summary:
|
||||
l = self.files.values()
|
||||
l = sorted(l, key=lambda f: -f.size)
|
||||
for f in l:
|
||||
f.pprint(verbose)
|
||||
|
||||
|
||||
def analyze_elf(elf_file_path, sections_letters, use_fast_nm):
|
||||
""" Analyzes the elf file, using binutils.
|
||||
    sections_letters -- string of letters representing the sections to
|
||||
analyze, e.g. 'tbd' => text, bss and data.
|
||||
use_fast_nm -- If False, a slow lookup method is used to avoid a bug in
|
||||
`nm`. If True, the faster `nm -S -l` is used.
|
||||
Returns a dictionary with SectionInfo objects for each section.
|
||||
"""
|
||||
def make_sections_dict(sections_letters):
|
||||
sections = {}
|
||||
for s in sections_letters:
|
||||
if s == 'b':
|
||||
sections['b'] = SectionInfo('.bss')
|
||||
elif s == 'd':
|
||||
sections['d'] = SectionInfo('.data')
|
||||
elif s == 't':
|
||||
sections['t'] = SectionInfo('.text')
|
||||
else:
|
||||
raise Exception('Invalid section <%s>, must be a combination'
|
||||
' of [bdt] characters\n' % s)
|
||||
return sections
|
||||
sections = make_sections_dict(sections_letters)
|
||||
|
||||
generator = nm_generator(elf_file_path, use_fast_nm)
|
||||
for (_, section, symbol_name, filename, line, size) in generator:
|
||||
if not filename:
|
||||
filename = 'Unknown'
|
||||
if section in sections:
|
||||
sections[section].add_entry(symbol_name, filename, size)
|
||||
|
||||
return sections
|
||||
|
||||
|
||||
def nm_generator(elf_path, use_fast_nm=True):
|
||||
if use_fast_nm:
|
||||
return _nm_generator_fast(elf_path)
|
||||
else:
|
||||
return _nm_generator_slow(elf_path)
|
||||
|
||||
|
||||
def _get_symbols_table(f):
|
||||
# NOTE: nm crashes when we pass in the -l command line option. As a
|
||||
# workaround, we use readelf to get the symbol to address mappings and then
|
||||
# we use addr2line to get file/lines from the addresses.
|
||||
infile = sh.arm_none_eabi_readelf('-s', '-W', f)
|
||||
|
||||
line_pattern = re.compile(r"""\s+([0-9]+\:)\s+ # number
|
||||
([0-9a-f]+)\s+ # address
|
||||
([0-9]+)\s+ # size
|
||||
(\S+)\s+ # type
|
||||
(\S+)\s+ # Bind
|
||||
(\S+)\s+ # Visibility
|
||||
(\S+)\s+ # Ndx
|
||||
(\S+) # symbol name
|
||||
""", flags=re.VERBOSE)
|
||||
|
||||
def create_addr2line_process():
|
||||
return subprocess.Popen(['arm-none-eabi-addr2line', '-e', f],
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
addr2line = create_addr2line_process()
|
||||
|
||||
symbols = {}
|
||||
for line_num, line in enumerate(infile):
|
||||
if (line_num % 300) == 0:
|
||||
sys.stdout.write(".")
|
||||
sys.stdout.flush()
|
||||
|
||||
match = line_pattern.match(line)
|
||||
|
||||
if match is None:
|
||||
continue
|
||||
|
||||
type = match.group(4)
|
||||
if type not in ['FUNC', 'OBJECT']:
|
||||
continue
|
||||
addr = match.group(2)
|
||||
symbol_name = match.group(8)
|
||||
|
||||
success = False
|
||||
while not success:
|
||||
try:
|
||||
addr2line.stdin.write("0x%s\n" % addr)
|
||||
success = True
|
||||
except IOError:
|
||||
# This happens if the previous iteration caused an error
|
||||
addr2line = create_addr2line_process()
|
||||
|
||||
src_file_line = addr2line.stdout.readline().strip()
|
||||
if src_file_line:
|
||||
# Some Bluetopia paths start with 'C:\...'
|
||||
components = src_file_line.split(':')
|
||||
src_file = ":".join(components[:-1])
|
||||
            line = components[-1]
|
||||
else:
|
||||
(src_file, line) = ('?', '0')
|
||||
symbols[symbol_name] = (src_file, line)
|
||||
|
||||
addr2line.kill()
|
||||
|
||||
print
|
||||
return symbols
|
||||
|
||||
|
||||
# This method is quite slow, but works around a bug in nm.
|
||||
def _nm_generator_slow(f):
|
||||
print "Getting list of symbols..."
|
||||
symbols = _get_symbols_table(f)
|
||||
print "Aggregating..."
|
||||
infile = sh.arm_none_eabi_nm('-S', f)
|
||||
|
||||
line_pattern = re.compile(r"""([0-9a-f]+)\s+ # address
|
||||
([0-9a-f]+)\s+ # size
|
||||
([dDbBtTrR])\s+ # section type
|
||||
(\S+) # name
|
||||
""", flags=re.VERBOSE)
|
||||
|
||||
for line in infile:
|
||||
match = line_pattern.match(line)
|
||||
|
||||
if match is None:
|
||||
continue
|
||||
|
||||
addr = int(match.group(1), 16)
|
||||
size = int(match.group(2), 16)
|
||||
section = match.group(3).lower()
|
||||
if section == 'r':
|
||||
section = 't'
|
||||
symbol_name = match.group(4)
|
||||
if symbol_name not in symbols:
|
||||
continue
|
||||
rel_file_path, line = symbols[symbol_name]
|
||||
if rel_file_path:
|
||||
rel_file_path = os.path.relpath(rel_file_path)
|
||||
|
||||
yield (addr, section, symbol_name, rel_file_path, line, size)
|
||||
|
||||
|
||||
# This method is much faster, and *should* work, but as of 2014-08-01, we get
|
||||
# exceptions when we try to run nm -l on the tintin ELF file. So, the
|
||||
# _nm_generator_slow() method above can be used as a workaround.
|
||||
def _nm_generator_fast(f):
|
||||
""" Given a path to an .elf, generates tuples:
|
||||
(section, symbol_name, rel_file_path, line, size)
|
||||
Note, rel_file_path and line can be None.
|
||||
|
||||
"""
|
||||
infile = sh.arm_none_eabi_nm('-l', '-S', f)
|
||||
|
||||
for line in infile:
|
||||
match = NM_LINE_PATTERN.match(line)
|
||||
|
||||
if match is None:
|
||||
continue
|
||||
|
||||
addr = int(match.group(1), 16)
|
||||
size = int(match.group(2), 16)
|
||||
|
||||
section = match.group(3).lower()
|
||||
if section == 'r':
|
||||
section = 't'
|
||||
symbol_name = match.group(4)
|
||||
|
||||
rel_file_path = match.group(6)
|
||||
if rel_file_path:
|
||||
rel_file_path = os.path.relpath(rel_file_path)
|
||||
|
||||
line = match.group(7)
|
||||
if line:
|
||||
line = int(line)
|
||||
|
||||
yield (addr, section, symbol_name, rel_file_path, line, size)
|
||||
|
||||
|
||||
def size(elf_path):
|
||||
""" Returns size (text, data, bss)
|
||||
|
||||
"""
|
||||
output = subprocess.check_output(["arm-none-eabi-size", elf_path])
|
||||
|
||||
lines = output.splitlines()
|
||||
if len(lines) < 2:
|
||||
        return (0, 0, 0)
|
||||
    match = re.match(r"^\s*([0-9]+)\s+([0-9]+)\s+([0-9]+)", lines[1])
|
||||
if not match:
|
||||
        return (0, 0, 0)
|
||||
# text, data, bss
|
||||
return (int(match.groups()[0]),
|
||||
int(match.groups()[1]),
|
||||
int(match.groups()[2]))
|
||||
|
||||
|
||||
def strip(elf_path):
|
||||
""" Strip debug info from specified .elf file
|
||||
"""
|
||||
sh.arm_none_eabi_strip(elf_path)
|
||||
|
||||
|
||||
def copy_elf_section(in_elf_path, out_elf_path, section_name_list):
|
||||
""" Creates out_elf_path containing only sections in 'section name list'
|
||||
"""
|
||||
args = []
|
||||
for name in section_name_list:
|
||||
args.append('-j')
|
||||
args.append(name)
|
||||
args.append(in_elf_path)
|
||||
args.append(out_elf_path)
|
||||
sh.arm_none_eabi_objcopy(args)
|
||||
|
||||
|
||||
def section_bytes(elf_path, section_name):
|
||||
""" Returns the bytes in a section of a given .elf file
|
||||
|
||||
"""
|
||||
with tempfile.NamedTemporaryFile() as temp:
|
||||
sh.arm_none_eabi_objcopy(['-j', section_name, '-O', 'binary',
|
||||
elf_path, temp.name])
|
||||
with open(temp.name) as f:
|
||||
return f.read()
|
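For reference, a sketch of the `nm -S -l` line format that NM_LINE_PATTERN above is written against; the sample line and symbol name are invented:

    import re

    pattern = re.compile(r"""([0-9a-f]+)\s+   # address
                             ([0-9a-f]+)\s+   # size
                             ([dDbBtTrR])\s+  # section type
                             (\S+)            # name
                             \s*((\S+)\:([0-9]+))?$  # filename + line
                         """, flags=re.VERBOSE)

    sample = "08004f10 00000124 T command_dump ../src/fw/console/dbgserial.c:87"
    m = pattern.match(sample)
    print("%d bytes in %s at %s:%s" %
          (int(m.group(2), 16), m.group(4), m.group(6), m.group(7)))
    # -> 292 bytes in command_dump at ../src/fw/console/dbgserial.c:87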
447
tools/bitmapgen.py
Normal file
|
@ -0,0 +1,447 @@
|
|||
#!/usr/bin/env python
|
||||
# Copyright 2024 Google LLC
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import StringIO
|
||||
import argparse
|
||||
import os
|
||||
import struct
|
||||
import sys
|
||||
import png
|
||||
|
||||
import generate_c_byte_array
|
||||
from pebble_image_routines import (rgba32_triplet_to_argb8, num_colors_to_bitdepth,
|
||||
get_reduction_func)
|
||||
|
||||
SCALE_TO_GCOLOR8 = 64
|
||||
|
||||
WHITE_COLOR_MAP = {
|
||||
'white': 1,
|
||||
'black': 0,
|
||||
'transparent': 0,
|
||||
}
|
||||
|
||||
BLACK_COLOR_MAP = {
|
||||
'white': 0,
|
||||
'black': 1,
|
||||
'transparent': 0,
|
||||
}
|
||||
|
||||
# translates bitdepth to supported PBI format
|
||||
# 0 is the special case of the legacy 1Bit format
|
||||
bitdepth_dict = {0: 0, #GBitmapFormat1Bit
|
||||
8: 1, #GBitmapFormat8Bit
|
||||
1: 2, #GBitmapFormat1BitPalette
|
||||
2: 3, #GBitmapFormat2BitPalette
|
||||
4: 4} #GBitmapFormat4BitPalette
|
||||
|
||||
FORMAT_BW = "bw"
|
||||
FORMAT_COLOR = "color"
|
||||
FORMAT_COLOR_RAW = "color_raw" # forces output to be ARGB8 (no palette)
|
||||
|
||||
FORMAT_CHOICES = [FORMAT_BW, FORMAT_COLOR, FORMAT_COLOR_RAW]
|
||||
DEFAULT_FORMAT = FORMAT_BW
|
||||
|
||||
TRUNCATE = "truncate"
|
||||
NEAREST = "nearest"
|
||||
COLOR_REDUCTION_CHOICES = [TRUNCATE, NEAREST]
|
||||
DEFAULT_COLOR_REDUCTION = NEAREST
|
||||
|
||||
# Bitmap struct only contains a color palette for GBitmapFormat(1/2/4)BitPalette
|
||||
|
||||
# Bitmap struct (NB: All fields are little-endian)
|
||||
# (uint16_t) row_size_bytes
|
||||
# (uint16_t) info_flags
|
||||
# bit 0 : is heap allocated (must be zero for bitmap files)
|
||||
# bits 1-5 : bitmap_format
|
||||
# bits 6-11 : reserved, must be 0
|
||||
# bits 12-15 : file version
|
||||
# (int16_t) bounds.origin.x
|
||||
# (int16_t) bounds.origin.y
|
||||
# (int16_t) bounds.size.w
|
||||
# (int16_t) bounds.size.h
|
||||
# (uint8_t)[] image data (row_size_bytes-aligned, 0-padded rows of bits)
|
||||
# [optional] (uint8_t)[] argb8 palette data (0-padded to 2 ** bitdepth)
|
||||
|
||||
class PebbleBitmap(object):
|
||||
def __init__(self, path, color_map=WHITE_COLOR_MAP, bitmap_format=DEFAULT_FORMAT,
|
||||
color_reduction_method=DEFAULT_COLOR_REDUCTION, crop=True, bitdepth=None,
|
||||
palette_name='pebble64'):
|
||||
self.palette_name = palette_name
|
||||
self.version = 1
|
||||
self.path = path
|
||||
self.name, _ = os.path.splitext(os.path.basename(path))
|
||||
self.color_map = color_map
|
||||
self.palette = None # only used in color mode for <=16 colors
|
||||
self.bitdepth = bitdepth # number of bits per pixel, 0 for legacy b&w
|
||||
if bitmap_format == FORMAT_BW:
|
||||
self.bitdepth = 0
|
||||
self.bitmap_format = bitmap_format
|
||||
self.color_reduction_method = color_reduction_method
|
||||
width, height, pixels, metadata = png.Reader(filename=path).asRGBA8()
|
||||
|
||||
# convert planar boxed row flat pixel to 2d array of (R, G, B, A)
|
||||
self._im_pixels = []
|
||||
for row in pixels:
|
||||
row_list = []
|
||||
for (r, g, b, a) in grouper(row, 4):
|
||||
row_list.append((r, g, b, a))
|
||||
self._im_pixels.append(row_list)
|
||||
|
||||
self._im_size = (width, height)
|
||||
self._set_bbox(crop)
|
||||
|
||||
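    # Shrink the bounds to the smallest rectangle containing any non-transparent
|
||||
    # pixel; with crop=False the bounds span the full image.
|
||||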
def _set_bbox(self, crop=True):
|
||||
left, top = (0, 0)
|
||||
right, bottom = self._im_size
|
||||
|
||||
if crop:
|
||||
alphas = [[p[3] for p in row] for row in self._im_pixels]
|
||||
alphas_transposed = zip(*alphas)
|
||||
for row in alphas:
|
||||
if any(row):
|
||||
break
|
||||
top += 1
|
||||
for row in reversed(alphas):
|
||||
if any(row):
|
||||
break
|
||||
bottom -= 1
|
||||
for row in alphas_transposed:
|
||||
if any(row):
|
||||
break
|
||||
left += 1
|
||||
for row in reversed(alphas_transposed):
|
||||
if any(row):
|
||||
break
|
||||
right -= 1
|
||||
|
||||
self.x = left
|
||||
self.y = top
|
||||
self.w = right - left
|
||||
self.h = bottom - top
|
||||
|
||||
def row_size_bytes(self):
|
||||
"""
|
||||
Return the length of the bitmap's row in bytes.
|
||||
|
||||
On b/w, row lengths are rounded up to the nearest word, padding up to
|
||||
3 empty bytes per row.
|
||||
On color, row lengths are rounded up to the nearest byte
|
||||
"""
|
||||
if self.bitmap_format == FORMAT_COLOR_RAW:
|
||||
return self.w
|
||||
elif self.bitmap_format == FORMAT_COLOR:
|
||||
# adds (8 / bitdepth) - 1 to round up (ceil) to the next nearest byte
|
||||
return (self.w + ((8 / self.bitdepth) - 1)) / (8 / self.bitdepth)
|
||||
else:
|
||||
row_size_padded_words = (self.w + 31) / 32
|
||||
return row_size_padded_words * 4
|
||||
|
||||
def info_flags(self):
|
||||
"""Returns the type and version of bitmap."""
|
||||
format_value = bitdepth_dict[self.bitdepth]
|
||||
return self.version << 12 | format_value << 1
|
||||
|
||||
def pbi_header(self):
|
||||
return struct.pack('<HHhhhh',
|
||||
self.row_size_bytes(),
|
||||
self.info_flags(),
|
||||
self.x,
|
||||
self.y,
|
||||
self.w,
|
||||
self.h)
|
||||
|
||||
def image_bits_bw(self):
|
||||
"""
|
||||
Return a raw b/w bitmap capable of being rendered using Pebble's bitblt graphics routines.
|
||||
|
||||
The returned bitmap will always be y * row_size_bytes large.
|
||||
"""
|
||||
|
||||
def get_monochrome_value_for_pixel(pixel):
|
||||
if pixel[3] < 127:
|
||||
return self.color_map['transparent']
|
||||
if ((pixel[0] + pixel[1] + pixel[2]) / 3) < 127:
|
||||
return self.color_map['black']
|
||||
return self.color_map['white']
|
||||
|
||||
def pack_pixels_to_bitblt_word(pixels, x_offset, x_max):
|
||||
word = 0
|
||||
for column in xrange(0, 32):
|
||||
x = x_offset + column
|
||||
if (x < x_max):
|
||||
pixel = pixels[x]
|
||||
word |= get_monochrome_value_for_pixel(pixel) << (column)
|
||||
|
||||
return struct.pack('<I', word)
|
||||
|
||||
src_pixels = self._im_pixels
|
||||
out_pixels = []
|
||||
row_size_words = self.row_size_bytes() / 4
|
||||
|
||||
for row in xrange(self.y, self.y + self.h):
|
||||
x_max = self._im_size[0]
|
||||
for column_word in xrange(0, row_size_words):
|
||||
x_offset = self.x + column_word * 32
|
||||
out_pixels.append(pack_pixels_to_bitblt_word(src_pixels[row],
|
||||
x_offset,
|
||||
x_max))
|
||||
|
||||
return ''.join(out_pixels)
|
||||
|
||||
def image_bits_color(self):
|
||||
"""
|
||||
Return a raw color bitmap capable of being rendered using Pebble's bitblt graphics routines.
|
||||
"""
|
||||
|
||||
if self.bitmap_format == FORMAT_COLOR_RAW:
|
||||
self.bitdepth = 8 # forced to 8-bit depth for color_raw, no palette
|
||||
else:
|
||||
self.generate_palette()
|
||||
|
||||
assert self.bitdepth is not None
|
||||
out_pixels = []
|
||||
for row in xrange(self.y, self.y + self.h):
|
||||
packed_count = 0
|
||||
packed_value = 0
|
||||
for column in xrange(self.x, self.x + self.w):
|
||||
pixel = self._im_pixels[row][column]
|
||||
                r, g, b, a = pixel
|
||||
|
||||
# convert RGBA 32-bit image colors to pebble color table
|
||||
fn = get_reduction_func(self.palette_name, self.color_reduction_method)
|
||||
r, g, b, a = fn(r, g, b, a)
|
||||
if a == 0:
|
||||
# clear values in transparent pixels
|
||||
r, g, b = (0, 0, 0)
|
||||
|
||||
# convert colors to ARGB8 format
|
||||
argb8 = rgba32_triplet_to_argb8(r, g, b, a)
|
||||
|
||||
if (self.bitdepth == 8):
|
||||
out_pixels.append(struct.pack("B", argb8))
|
||||
else:
|
||||
# all palettized color bitdepths (1, 2, 4)
|
||||
# look up the color index in the palette
|
||||
color_index = self.palette.index(argb8)
|
||||
# shift and store the color index in a packed value
|
||||
packed_count = packed_count + 1 # pre-increment for calculation below
|
||||
packed_value = packed_value | (color_index << \
|
||||
(self.bitdepth * (8 / self.bitdepth - (packed_count))))
|
||||
|
||||
if (packed_count == 8 / self.bitdepth):
|
||||
out_pixels.append(struct.pack("B", packed_value))
|
||||
packed_count = 0
|
||||
packed_value = 0
|
||||
|
||||
# write out the last non-byte-aligned set for the row (ie. byte-align rows)
|
||||
if (packed_count):
|
||||
out_pixels.append(struct.pack("B", packed_value))
|
||||
|
||||
return ''.join(out_pixels)
|
||||
|
||||
def image_bits(self):
|
||||
if self.bitmap_format == FORMAT_COLOR or self.bitmap_format == FORMAT_COLOR_RAW:
|
||||
return self.image_bits_color()
|
||||
else:
|
||||
return self.image_bits_bw()
|
||||
|
||||
def header(self):
|
||||
f = StringIO.StringIO()
|
||||
f.write("// GBitmap + pixel data generated by bitmapgen.py:\n\n")
|
||||
bytes = self.image_bits()
|
||||
bytes_var_name = "s_{var_name}_pixels".format(var_name=self.name)
|
||||
generate_c_byte_array.write(f, bytes, bytes_var_name)
|
||||
f.write("static const GBitmap s_{0}_bitmap = {{\n".format(self.name))
|
||||
f.write(" .addr = (void*) &{0},\n".format(bytes_var_name))
|
||||
f.write(" .row_size_bytes = {0},\n".format(self.row_size_bytes()))
|
||||
f.write(" .info_flags = 0x%02x,\n" % self.info_flags())
|
||||
f.write(" .bounds = {\n")
|
||||
f.write(" .origin = {{ .x = {0}, .y = {1} }},\n".format(self.x, self.y))
|
||||
f.write(" .size = {{ .w = {0}, .h = {1} }},\n".format(self.w, self.h))
|
||||
f.write(" },\n")
|
||||
f.write("};\n\n")
|
||||
return f.getvalue()
|
||||
|
||||
def convert_to_h(self, header_file=None):
|
||||
to_file = header_file if header_file else (os.path.splitext(self.path)[0] + '.h')
|
||||
with open(to_file, 'w') as f:
|
||||
f.write(self.header())
|
||||
return to_file
|
||||
|
||||
def convert_to_pbi(self):
|
||||
pbi_bits = []
|
||||
image_data = self.image_bits() # compute before generating header
|
||||
|
||||
pbi_bits.extend(self.pbi_header())
|
||||
pbi_bits.extend(image_data)
|
||||
if self.palette and self.bitdepth < 8:
|
||||
# write out palette, padded to the bitdepth
|
||||
for i in xrange(0, 2**self.bitdepth):
|
||||
value = 0
|
||||
if i < len(self.palette):
|
||||
value = self.palette[i]
|
||||
pbi_bits.extend(struct.pack('B', value))
|
||||
|
||||
return b"".join(pbi_bits)
|
||||
|
||||
def convert_to_pbi_file(self, pbi_file=None):
|
||||
to_file = pbi_file if pbi_file else (os.path.splitext(self.path)[0] + '.pbi')
|
||||
|
||||
with open(to_file, 'wb') as f:
|
||||
f.write(self.convert_to_pbi())
|
||||
|
||||
return to_file
|
||||
|
||||
def generate_palette(self):
|
||||
self.palette = []
|
||||
for row in xrange(self.y, self.y + self.h):
|
||||
for column in xrange(self.x, self.x + self.w):
|
||||
pixel = self._im_pixels[row][column]
|
||||
                r, g, b, a = pixel
|
||||
|
||||
# convert RGBA 32-bit image colors to pebble color table
|
||||
fn = get_reduction_func(self.palette_name, self.color_reduction_method)
|
||||
r, g, b, a = fn(r, g, b, a)
|
||||
|
||||
if a == 0:
|
||||
# clear values in transparent pixels
|
||||
r, g, b = (0, 0, 0)
|
||||
|
||||
# store color value as ARGB8 entry in the palette
|
||||
self.palette.append(rgba32_triplet_to_argb8(r, g, b, a))
|
||||
|
||||
# remove duplicate colors
|
||||
self.palette = list(set(self.palette))
|
||||
|
||||
# get the bitdepth for the number of colors
|
||||
min_bitdepth = num_colors_to_bitdepth(len(self.palette))
|
||||
if self.bitdepth is None:
|
||||
self.bitdepth = min_bitdepth
|
||||
if self.bitdepth < min_bitdepth:
|
||||
raise Exception("Required bitdepth {} is lower than required depth {}."
|
||||
.format(self.bitdepth, min_bitdepth))
|
||||
|
||||
        # Pad the palette with zeroes to the full 2 ** bitdepth entries.
        self.palette.extend([0] * (2 ** self.bitdepth - len(self.palette)))
|
||||
|
||||
|
||||
def cmd_pbi(args):
|
||||
pb = PebbleBitmap(args.input_png, bitmap_format=args.format,
|
||||
color_reduction_method=args.color_reduction_method, crop=not args.disable_crop)
|
||||
pb.convert_to_pbi_file(args.output_pbi)
|
||||
|
||||
|
||||
def cmd_header(args):
|
||||
pb = PebbleBitmap(args.input_png, bitmap_format=args.format,
|
||||
color_reduction_method=args.color_reduction_method, crop=not args.disable_crop)
|
||||
print pb.header()
|
||||
|
||||
|
||||
def cmd_white_trans_pbi(args):
|
||||
pb = PebbleBitmap(args.input_png, WHITE_COLOR_MAP, crop=not args.disable_crop)
|
||||
pb.convert_to_pbi_file(args.output_pbi)
|
||||
|
||||
|
||||
def cmd_black_trans_pbi(args):
|
||||
pb = PebbleBitmap(args.input_png, BLACK_COLOR_MAP, crop=not args.disable_crop)
|
||||
pb.convert_to_pbi_file(args.output_pbi)
|
||||
|
||||
|
||||
def process_all_bitmaps():
|
||||
directory = "bitmaps"
|
||||
paths = []
|
||||
    for dirpath, _, filenames in os.walk(directory):
|
||||
for filename in filenames:
|
||||
if os.path.splitext(filename)[1] == '.png':
|
||||
                paths.append(os.path.join(dirpath, filename))
|
||||
|
||||
header_paths = []
|
||||
for path in paths:
|
||||
b = PebbleBitmap(path)
|
||||
b.convert_to_pbi_file()
|
||||
to_file = b.convert_to_h()
|
||||
header_paths.append(os.path.basename(to_file))
|
||||
|
||||
f = open(os.path.join(directory, 'bitmaps.h'), 'w')
|
||||
print>> f, '#pragma once'
|
||||
for h in header_paths:
|
||||
print>> f, "#include \"{0}\"".format(h)
|
||||
f.close()
|
||||
|
||||
def grouper(iterable, n, fillvalue=None):
|
||||
from itertools import izip_longest
|
||||
|
||||
args = [iter(iterable)] * n
|
||||
return izip_longest(fillvalue=fillvalue, *args)
|
||||
|
||||
def process_cmd_line_args():
|
||||
parser = argparse.ArgumentParser(description="Generate pebble-usable files from png images")
|
||||
|
||||
parser_parent = argparse.ArgumentParser(add_help=False)
|
||||
parser_parent.add_argument('--disable_crop', required=False, action='store_true',
|
||||
help='Disable transparent region cropping for PBI output')
|
||||
parser_parent.add_argument('--color_reduction_method', metavar='method', required=False,
|
||||
                               default=NEAREST, choices=COLOR_REDUCTION_CHOICES,
|
||||
help="Method used to convert colors to Pebble's color palette, "
|
||||
"options are [{}, {}]".format(NEAREST, TRUNCATE))
|
||||
|
||||
subparsers = parser.add_subparsers(help="commands", dest='which')
|
||||
|
||||
bitmap_format = {"dest": "format", "metavar": "BITMAP_FORMAT",
|
||||
"choices": FORMAT_CHOICES, "nargs": "?",
|
||||
"default": DEFAULT_FORMAT, "help": "resulting GBitmap format"}
|
||||
input_png = {"dest": "input_png", "metavar": "INPUT_PNG", "help": "The png image to process"}
|
||||
output_pbi = {"dest": "output_pbi", "metavar": "OUTPUT_PBI", "help": "The pbi output file"}
|
||||
|
||||
pbi_parser = subparsers.add_parser('pbi', parents=[parser_parent],
|
||||
help="make a .pbi (pebble binary image) file")
|
||||
|
||||
for arg in [bitmap_format, input_png, output_pbi]:
|
||||
pbi_parser.add_argument(**arg)
|
||||
pbi_parser.set_defaults(func=cmd_pbi)
|
||||
|
||||
h_parser = subparsers.add_parser('header', parents=[parser_parent], help="make a .h file")
|
||||
for arg in [bitmap_format, input_png]:
|
||||
h_parser.add_argument(**arg)
|
||||
h_parser.set_defaults(func=cmd_header)
|
||||
|
||||
white_pbi_parser = subparsers.add_parser('white_trans_pbi', parents=[parser_parent],
|
||||
help="make a .pbi (pebble binary image) file for a white transparency layer")
|
||||
for arg in [input_png, output_pbi]:
|
||||
white_pbi_parser.add_argument(**arg)
|
||||
white_pbi_parser.set_defaults(func=cmd_white_trans_pbi)
|
||||
|
||||
black_pbi_parser = subparsers.add_parser('black_trans_pbi', parents=[parser_parent],
|
||||
help="make a .pbi (pebble binary image) file for a black transparency layer")
|
||||
for arg in [input_png, output_pbi]:
|
||||
black_pbi_parser.add_argument(**arg)
|
||||
black_pbi_parser.set_defaults(func=cmd_black_trans_pbi)
|
||||
|
||||
args = parser.parse_args()
|
||||
args.func(args)
|
||||
|
||||
|
||||
def main():
|
||||
if (len(sys.argv) < 2):
|
||||
# process everything in the bitmaps folder
|
||||
process_all_bitmaps()
|
||||
else:
|
||||
# process an individual file
|
||||
process_cmd_line_args()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
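To make the PBI layout documented in the struct comment above concrete, a small sketch that unpacks the fixed header of a generated file; 'bootlogo.pbi' is a hypothetical output of `bitmapgen.py pbi`:

    import struct

    with open('bootlogo.pbi', 'rb') as f:
        header = f.read(struct.calcsize('<HHhhhh'))  # 12 bytes, all little-endian
    row_size_bytes, info_flags, x, y, w, h = struct.unpack('<HHhhhh', header)
    version = info_flags >> 12                # bits 12-15
    bitmap_format = (info_flags >> 1) & 0x1f  # bits 1-5
    print((row_size_bytes, version, bitmap_format, x, y, w, h))

The remaining bytes are h rows of row_size_bytes pixel data, followed, for the palettized formats, by the ARGB8 palette.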
BIN
tools/bitmaps/action_button_list.png
Normal file
After Width: | Height: | Size: 947 B |
BIN
tools/bitmaps/action_button_x.png
Normal file
After Width: | Height: | Size: 950 B |
BIN
tools/bitmaps/background_worker.png
Normal file
After Width: | Height: | Size: 983 B |
BIN
tools/bitmaps/bar_icon_actions.png
Normal file
After Width: | Height: | Size: 265 B |
BIN
tools/bitmaps/bar_icon_check.png
Normal file
After Width: | Height: | Size: 1.2 KiB |
BIN
tools/bitmaps/bar_icon_down.png
Normal file
After Width: | Height: | Size: 1,013 B |
BIN
tools/bitmaps/bar_icon_ellipsis.png
Normal file
After Width: | Height: | Size: 219 B |
BIN
tools/bitmaps/bar_icon_next.png
Normal file
After Width: | Height: | Size: 236 B |
BIN
tools/bitmaps/bar_icon_no.png
Normal file
After Width: | Height: | Size: 248 B |
BIN
tools/bitmaps/bar_icon_pause.png
Normal file
After Width: | Height: | Size: 204 B |
BIN
tools/bitmaps/bar_icon_phone.png
Normal file
After Width: | Height: | Size: 264 B |
BIN
tools/bitmaps/bar_icon_play.png
Normal file
After Width: | Height: | Size: 236 B |
BIN
tools/bitmaps/bar_icon_playpause.png
Normal file
After Width: | Height: | Size: 1.2 KiB |
BIN
tools/bitmaps/bar_icon_previous.png
Normal file
After Width: | Height: | Size: 233 B |
BIN
tools/bitmaps/bar_icon_snooze.png
Normal file
After Width: | Height: | Size: 15 KiB |
BIN
tools/bitmaps/bar_icon_speaker_loud.png
Normal file
After Width: | Height: | Size: 261 B |
BIN
tools/bitmaps/bar_icon_speaker_soft.png
Normal file
After Width: | Height: | Size: 251 B |
BIN
tools/bitmaps/bar_icon_up.png
Normal file
After Width: | Height: | Size: 175 B |
BIN
tools/bitmaps/bar_icon_x.png
Normal file
After Width: | Height: | Size: 263 B |
BIN
tools/bitmaps/bar_icon_yes.png
Normal file
After Width: | Height: | Size: 249 B |
BIN
tools/bitmaps/bootlogo.png
Normal file
After Width: | Height: | Size: 292 B |
BIN
tools/bitmaps/error_icon.png
Normal file
After Width: | Height: | Size: 292 B |
BIN
tools/bitmaps/fps_background.png
Normal file
After Width: | Height: | Size: 207 B |
BIN
tools/bitmaps/fps_topleft.png
Normal file
After Width: | Height: | Size: 491 B |
BIN
tools/bitmaps/golf_api_click.png
Normal file
After Width: | Height: | Size: 319 B |
BIN
tools/bitmaps/golf_api_down.png
Normal file
After Width: | Height: | Size: 283 B |
BIN
tools/bitmaps/golf_api_up.png
Normal file
After Width: | Height: | Size: 275 B |
BIN
tools/bitmaps/music_launcher_icon.png
Normal file
After Width: | Height: | Size: 46 KiB |
BIN
tools/bitmaps/notification_status_checkmark.png
Normal file
After Width: | Height: | Size: 2.9 KiB |
BIN
tools/bitmaps/notification_status_crossmark.png
Normal file
After Width: | Height: | Size: 2.9 KiB |
BIN
tools/bitmaps/notification_status_large_crossmark.png
Normal file
After Width: | Height: | Size: 1.1 KiB |
BIN
tools/bitmaps/prf_app.png
Normal file
After Width: | Height: | Size: 1.5 KiB |
BIN
tools/bitmaps/prf_pair.png
Normal file
After Width: | Height: | Size: 1.4 KiB |
BIN
tools/bitmaps/prf_progress.png
Normal file
After Width: | Height: | Size: 907 B |
BIN
tools/bitmaps/pug.png
Normal file
After Width: | Height: | Size: 49 KiB |
BIN
tools/bitmaps/quiet_time_status_icon.png
Normal file
After Width: | Height: | Size: 1.2 KiB |
BIN
tools/bitmaps/sad-watch-img.png
Normal file
After Width: | Height: | Size: 257 B |
BIN
tools/bitmaps/safe_msg.png
Normal file
After Width: | Height: | Size: 648 B |
BIN
tools/bitmaps/scroll_shadow_bottom.png
Normal file
After Width: | Height: | Size: 2.8 KiB |
BIN
tools/bitmaps/scroll_shadow_top.png
Normal file
After Width: | Height: | Size: 2.8 KiB |
BIN
tools/bitmaps/sports_api_pause_icon.png
Normal file
After Width: | Height: | Size: 151 B |
BIN
tools/bitmaps/sports_api_resume_icon.png
Normal file
After Width: | Height: | Size: 256 B |
BIN
tools/bitmaps/status_battery_charged.png
Normal file
After Width: | Height: | Size: 981 B |
BIN
tools/bitmaps/status_battery_charging.png
Normal file
After Width: | Height: | Size: 233 B |
BIN
tools/bitmaps/status_battery_empty.png
Normal file
After Width: | Height: | Size: 163 B |
BIN
tools/bitmaps/status_icon_airplane_mode.png
Normal file
After Width: | Height: | Size: 151 B |
BIN
tools/bitmaps/status_icon_bluetooth.png
Normal file
After Width: | Height: | Size: 241 B |
BIN
tools/bitmaps/status_icon_do_not_disturb.png
Normal file
After Width: | Height: | Size: 15 KiB |
BIN
tools/bitmaps/status_icon_email.png
Normal file
After Width: | Height: | Size: 2.8 KiB |
BIN
tools/bitmaps/status_icon_facebook.png
Normal file
After Width: | Height: | Size: 253 B |
BIN
tools/bitmaps/status_icon_launcher.png
Normal file
After Width: | Height: | Size: 2.8 KiB |
BIN
tools/bitmaps/status_icon_music.png
Normal file
After Width: | Height: | Size: 235 B |
BIN
tools/bitmaps/status_icon_phone.png
Normal file
After Width: | Height: | Size: 243 B |
BIN
tools/bitmaps/status_icon_phone_only.png
Normal file
After Width: | Height: | Size: 15 KiB |
BIN
tools/bitmaps/status_icon_pong.png
Normal file
After Width: | Height: | Size: 2.8 KiB |
BIN
tools/bitmaps/status_icon_runkeeper.png
Normal file
After Width: | Height: | Size: 251 B |
BIN
tools/bitmaps/status_icon_settings.png
Normal file
After Width: | Height: | Size: 245 B |
BIN
tools/bitmaps/status_icon_silent.png
Normal file
After Width: | Height: | Size: 15 KiB |
BIN
tools/bitmaps/status_icon_sms.png
Normal file
After Width: | Height: | Size: 234 B |