Import of the watch repository from Pebble

Matthieu Jeanson 2024-12-12 16:43:03 -08:00 committed by Katharine Berry
commit 3b92768480
10334 changed files with 2564465 additions and 0 deletions

sdk/waftools/pebble_sdk.py Normal file
@@ -0,0 +1,343 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from waflib.Configure import conf
from waflib.Errors import ConfigurationError
from waflib import Logs
import sdk_paths
from generate_appinfo import generate_appinfo_c
from process_sdk_resources import generate_resources
import report_memory_usage
from sdk_helpers import (configure_libraries, configure_platform, find_sdk_component,
get_target_platforms, truncate_to_32_bytes, validate_message_keys_object)
def _extract_project_info(conf, info_json, json_filename):
"""
Extract project info from "pebble" object, or copy configuration directly if read from
appinfo.json
:param conf: the ConfigurationContext
:param info_json: the JSON blob contained in appinfo.json or package.json
:param json_filename: the name of the file the JSON blob was read from
:return: JSON blob containing project information for build
"""
if 'pebble' in info_json:
project_info = info_json['pebble']
validate_message_keys_object(conf, project_info, 'package.json')
project_info['name'] = info_json['name']
project_info['shortName'] = project_info['longName'] = project_info['displayName']
# Validate version specified in package.json to avoid issues later
if not info_json['version']:
conf.fatal("Project is missing a version")
version = _validate_version(conf, info_json['version'])
project_info['versionLabel'] = version
if isinstance(info_json['author'], basestring):
project_info['companyName'] = (
info_json['author'].split('(', 1)[0].split('<', 1)[0].strip())
elif isinstance(info_json['author'], dict) and 'name' in info_json['author']:
project_info['companyName'] = info_json['author']['name']
else:
conf.fatal("Missing author name in project info")
elif 'package.json' == json_filename:
try:
with open(conf.path.get_src().find_node('appinfo.json').abspath(), 'r') as f:
info_json = json.load(f)
except AttributeError:
conf.fatal("Could not find Pebble project info in package.json and no appinfo.json file"
" exists")
project_info = info_json
validate_message_keys_object(conf, project_info, 'appinfo.json')
else:
project_info = info_json
validate_message_keys_object(conf, project_info, 'appinfo.json')
return project_info
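
For illustration, a minimal package.json that satisfies the checks above might look like
this (all values hypothetical):

    {
        "name": "my-watchapp",
        "version": "1.0.0",
        "author": "Jane Doe <jane@example.com>",
        "pebble": {
            "displayName": "My Watchapp",
            "messageKeys": []
        }
    }

Here companyName is derived as "Jane Doe" by splitting the author string at the first '('
or '<', and shortName/longName both take the value of displayName.
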
def _generate_appinfo_c_file(task):
"""
This Task generates the appinfo.auto.c file that is included in binary metadata
:param task: the instance of this task
:return: N/A
"""
info_json = dict(getattr(task.generator.env, task.vars[0]))
info_json['shortName'] = truncate_to_32_bytes(info_json['shortName'])
info_json['companyName'] = truncate_to_32_bytes(info_json['companyName'])
current_platform = task.generator.env.PLATFORM_NAME
generate_appinfo_c(info_json, task.outputs[0].abspath(), current_platform)
def _write_appinfo_json_file(task):
"""
This task writes the content of the PROJECT_INFO environment variable to appinfo.json in the
build directory. PROJECT_INFO is generated from reading in either a package.json file or an
old-style appinfo.json file.
:param task: the task instance
:return: None
"""
appinfo = dict(getattr(task.generator.env, task.vars[0]))
capabilities = appinfo.get('capabilities', [])
for lib in dict(task.generator.env).get('LIB_JSON', []):
if 'pebble' in lib:
capabilities.extend(lib['pebble'].get('capabilities', []))
appinfo['capabilities'] = list(set(capabilities))
for key in task.env.BLOCK_MESSAGE_KEYS:
del appinfo['appKeys'][key]
if appinfo:
with open(task.outputs[0].abspath(), 'w') as f:
json.dump(appinfo, f, indent=4)
else:
task.generator.bld.fatal("Unable to find project info to populate appinfo.json file with")
def _validate_version(ctx, original_version):
"""
Validates the format of the version field in an app's project info, and strips off a
zero-valued patch version number, if it exists, to be compatible with the Pebble FW
:param ctx: the ConfigureContext object
:param original_version: the version provided in project info (package.json/appinfo.json)
:return: a MAJOR.MINOR version that is acceptable for Pebble FW
"""
version = original_version.split('.')
if len(version) > 3:
ctx.fatal("App versions must be of the format MAJOR or MAJOR.MINOR or MAJOR.MINOR.0. An "
"invalid version of {} was specified for the app. Try {}.{}.0 instead".
format(original_version, version[0], version[1]))
elif not (0 <= int(version[0]) <= 255):
ctx.fatal("An invalid or out of range value of {} was specified for the major version of "
"the app. The valid range is 0-255.".format(version[0]))
elif not (0 <= int(version[1]) <= 255):
ctx.fatal("An invalid or out of range value of {} was specified for the minor version of "
"the app. The valid range is 0-255.".format(version[1]))
elif len(version) > 2 and not (int(version[2]) == 0):
ctx.fatal("The patch version of an app must be 0, but {} was specified ({}). Try {}.{}.0 "
"instead.".
format(version[2], original_version, version[0], version[1]))
return version[0] + '.' + version[1]
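
A quick sketch of how these checks treat a few hypothetical inputs:

    _validate_version(conf, '1.2')      # returns '1.2'
    _validate_version(conf, '1.2.0')    # returns '1.2' (zero patch version stripped)
    _validate_version(conf, '1.2.3')    # fatal: the patch version must be 0
    _validate_version(conf, '256.0')    # fatal: major version outside the 0-255 range
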
def options(opt):
"""
Specify the options available when invoking waf; uses OptParse
:param opt: the OptionContext object
:return: N/A
"""
opt.load('pebble_sdk_common')
opt.add_option('-t', '--timestamp', dest='timestamp',
help="Use a specific timestamp to label this package (ie, your repository's "
"last commit time), defaults to time of build")
def configure(conf):
"""
Configure the build using information obtained from a JSON file
:param conf: the ConfigureContext object
:return: N/A
"""
conf.load('pebble_sdk_common')
# This overrides the default config in pebble_sdk_common.py
if conf.options.timestamp:
conf.env.TIMESTAMP = conf.options.timestamp
conf.env.BUNDLE_NAME = "app_{}.pbw".format(conf.env.TIMESTAMP)
else:
conf.env.BUNDLE_NAME = "{}.pbw".format(conf.path.name)
# Read in package.json for environment configuration, or fallback to appinfo.json for older
# projects
info_json_node = (conf.path.get_src().find_node('package.json') or
conf.path.get_src().find_node('appinfo.json'))
if info_json_node is None:
conf.fatal('Could not find package.json')
with open(info_json_node.abspath(), 'r') as f:
info_json = json.load(f)
project_info = _extract_project_info(conf, info_json, info_json_node.name)
conf.env.PROJECT_INFO = project_info
conf.env.BUILD_TYPE = 'rocky' if project_info.get('projectType', None) == 'rocky' else 'app'
if conf.env.PROJECT_INFO.get('enableMultiJS', False):
if not conf.env.WEBPACK:
conf.fatal("'enableMultiJS' is set to true, but unable to locate webpack module at {} "
"Please set enableMultiJS to false, or reinstall the SDK.".
format(conf.env.NODE_PATH))
if conf.env.BUILD_TYPE == 'rocky':
conf.find_program('node nodejs', var='NODE',
errmsg="Unable to locate the Node command. "
"Please check your Node installation and try again.")
c_files = [c_file.path_from(conf.path.find_node('src'))
for c_file in conf.path.ant_glob('src/**/*.c')]
if c_files:
Logs.pprint('YELLOW', "WARNING: C source files are not supported for Rocky.js "
"projects. The following C files are being skipped: {}".
format(c_files))
if 'resources' in project_info and 'media' in project_info['resources']:
conf.env.RESOURCES_JSON = project_info['resources']['media']
if 'publishedMedia' in project_info['resources']:
conf.env.PUBLISHED_MEDIA_JSON = project_info['resources']['publishedMedia']
conf.env.REQUESTED_PLATFORMS = project_info.get('targetPlatforms', [])
conf.env.LIB_DIR = "node_modules"
get_target_platforms(conf)
# With new-style projects, check for libraries specified in package.json
if 'dependencies' in info_json:
configure_libraries(conf, info_json['dependencies'])
conf.load('process_message_keys')
# base_env is set to a shallow copy of the current ConfigSet for this ConfigureContext
base_env = conf.env
for platform in conf.env.TARGET_PLATFORMS:
# Create a deep copy of the `base_env` ConfigSet and set conf.env to a shallow copy of
# the resultant ConfigSet
conf.setenv(platform, base_env)
configure_platform(conf, platform)
# conf.env is set back to a shallow copy of the default ConfigSet stored in conf.all_envs['']
conf.setenv('')
def build(bld):
"""
This method is invoked from a project's wscript with the `ctx.load('pebble_sdk')` call and
sets up all of the task generators for the SDK. After all of the build methods have run,
the configured task generators will run, generating build tasks and managing dependencies.
See https://waf.io/book/#_task_generators for more details on task generator setup.
:param bld: the BuildContext object
:return: N/A
"""
bld.load('pebble_sdk_common')
# cached_env is set to a shallow copy of the current ConfigSet for this BuildContext
cached_env = bld.env
for platform in bld.env.TARGET_PLATFORMS:
# bld.env is set to a shallow copy of the ConfigSet labeled <platform>
bld.env = bld.all_envs[platform]
# Set the build group (set of TaskGens) to the group labeled <platform>
if bld.env.USE_GROUPS:
bld.set_group(bld.env.PLATFORM_NAME)
# Generate an appinfo file specific to the current platform
build_node = bld.path.get_bld().make_node(bld.env.BUILD_DIR)
bld(rule=_generate_appinfo_c_file,
target=build_node.make_node('appinfo.auto.c'),
vars=['PROJECT_INFO'])
# Generate an appinfo.json file for the current platform to bundle in a PBW
bld(rule=_write_appinfo_json_file,
target=bld.path.get_bld().make_node('appinfo.json'),
vars=['PROJECT_INFO'])
# Generate resources specific to the current platform
resource_node = None
if bld.env.RESOURCES_JSON:
try:
resource_node = bld.path.find_node('resources')
except AttributeError:
bld.fatal("Unable to locate resources at resources/")
# Adding the Rocky.js source file needs to happen before the setup of the Resource
# Generators
if bld.env.BUILD_TYPE == 'rocky':
rocky_js_file = bld.path.find_or_declare('resources/rocky-app.js')
rocky_js_file.parent.mkdir()
bld.pbl_js_build(source=bld.path.ant_glob(['src/rocky/**/*.js',
'src/common/**/*.js']),
target=rocky_js_file)
resource_node = bld.path.get_bld().make_node('resources')
bld.env.RESOURCES_JSON = [{'type': 'js',
'name': 'JS_SNAPSHOT',
'file': rocky_js_file.path_from(resource_node)}]
resource_path = resource_node.path_from(bld.path) if resource_node else None
generate_resources(bld, resource_path)
# Running `pbl_build` needs to happen after the setup of the Resource Generators so
# `report_memory_usage` is aware of the existence of the JS bytecode file
if bld.env.BUILD_TYPE == 'rocky':
rocky_c_file = build_node.make_node('src/rocky.c')
bld(rule='cp "${SRC}" "${TGT}"',
source=find_sdk_component(bld, bld.env, 'include/rocky.c'),
target=rocky_c_file)
# Check for rocky script. (This is done in `build` to preserve the script as a node
# instead of as an absolute path, as would be required in `configure`. This keeps
# the signatures the same for both FW builds and SDK builds.)
if not bld.env.JS_TOOLING_SCRIPT:
bld.fatal("Unable to locate tooling for this Rocky.js app build. Please "
"try re-installing this version of the SDK.")
bld.pbl_build(source=[rocky_c_file],
target=build_node.make_node("pebble-app.elf"),
bin_type='rocky')
# bld.env is set back to a shallow copy of the original ConfigSet that was set when this `build`
# method was invoked
bld.env = cached_env
@conf
def pbl_program(self, *k, **kw):
"""
This method is bound to the build context and is called by specifying `bld.pbl_program()`. We
set the custom features `c`, `cprogram` and `pebble_cprogram` to run when this method is
invoked.
:param self: the BuildContext object
:param k: none expected
:param kw:
source - the source C files to be built and linked
target - the destination binary file for the compiled source
:return: a task generator instance with keyword arguments specified
"""
kw['bin_type'] = 'app'
kw['features'] = 'c cprogram pebble_cprogram memory_usage'
kw['app'] = kw['target']
kw['resources'] = (
self.path.find_or_declare(self.env.BUILD_DIR).make_node('app_resources.pbpack'))
return self(*k, **kw)
@conf
def pbl_worker(self, *k, **kw):
"""
This method is bound to the build context and is called by specifying `bld.pbl_worker()`. We set
the custom features `c`, `cprogram` and `pebble_cprogram` to run when this method is invoked.
:param self: the BuildContext object
:param k: none expected
:param kw:
source - the source C files to be built and linked
target - the destination binary file for the compiled source
:return: a task generator instance with keyword arguments specified
"""
kw['bin_type'] = 'worker'
kw['features'] = 'c cprogram pebble_cprogram memory_usage'
kw['worker'] = kw['target']
return self(*k, **kw)
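
As a usage sketch, a project wscript typically drives these methods per target platform,
roughly as follows (condensed from the shape of a typical SDK project wscript; globs and
file names are illustrative):

    def build(ctx):
        ctx.load('pebble_sdk')
        binaries = []
        cached_env = ctx.env
        for platform in ctx.env.TARGET_PLATFORMS:
            # Switch to the per-platform ConfigSet configured above
            ctx.env = ctx.all_envs[platform]
            ctx.set_group(ctx.env.PLATFORM_NAME)
            app_elf = '{}/pebble-app.elf'.format(ctx.env.BUILD_DIR)
            ctx.pbl_program(source=ctx.path.ant_glob('src/**/*.c'), target=app_elf)
            binaries.append({'platform': platform, 'app_elf': app_elf})
        ctx.env = cached_env
        ctx.pbl_bundle(binaries=binaries, js=ctx.path.ant_glob('src/pkjs/**/*.js'))
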

sdk/waftools/pebble_sdk_common.py Normal file
@@ -0,0 +1,374 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import time
import types
from waflib import Logs
from waflib.Configure import conf
from waflib.Task import Task
from waflib.TaskGen import after_method, before_method, feature
from waflib.Tools import c, c_preproc
import ldscript, process_bundle, process_headers, process_js, report_memory_usage, xcode_pebble
from pebble_sdk_platform import maybe_import_internal
from sdk_helpers import (append_to_attr, find_sdk_component, get_node_from_abspath,
wrap_task_name_with_platform)
# Override the default waf task __str__ method to include display of the HW platform being targeted
Task.__str__ = wrap_task_name_with_platform
def options(opt):
"""
Specify the options available when invoking waf; uses OptParse. This method is called from
app and lib waftools by `opt.load('pebble_sdk_common')`
:param opt: the OptionContext object
:return: N/A
"""
opt.load('gcc')
opt.add_option('-d', '--debug', action='store_true', default=False, dest='debug',
help='Build in debug mode')
opt.add_option('--no-groups', action='store_true', default=False, dest='no_groups')
opt.add_option('--sandboxed-build', action='store_true', default=False, dest='sandbox')
def configure(conf):
"""
Configure the tools for the build by locating SDK prerequisites on the filesystem
:param conf: the ConfigureContext
:return: N/A
"""
if not conf.options.debug:
conf.env.append_value('DEFINES', 'RELEASE')
else:
Logs.pprint("CYAN", "Debug enabled")
if conf.options.no_groups:
conf.env.USE_GROUPS = False
else:
conf.env.USE_GROUPS = True
conf.env.SANDBOX = conf.options.sandbox
conf.env.VERBOSE = conf.options.verbose
conf.env.TIMESTAMP = int(time.time())
# If waf is in ~/pebble-dev/PebbleSDK-X.XX/waf
# Then this file is in ~/pebble-dev/PebbleSDK-X.XX/.waflib-xxxx/waflib/extras/
# => we need to go up 3 directories to find the folder containing waf
pebble_sdk = conf.root.find_dir(os.path.dirname(__file__)).parent.parent.parent
if pebble_sdk is None:
conf.fatal("Unable to find Pebble SDK!\n"
"Please make sure you are running waf directly from your SDK.")
conf.env.PEBBLE_SDK_ROOT = pebble_sdk.abspath()
# Set location of Pebble SDK common folder
pebble_sdk_common = pebble_sdk.find_node('common')
conf.env.PEBBLE_SDK_COMMON = pebble_sdk_common.abspath()
if 'NODE_PATH' in os.environ:
conf.env.NODE_PATH = conf.root.find_node(os.environ['NODE_PATH']).abspath()
webpack_path = conf.root.find_node(conf.env.NODE_PATH).find_node('.bin').abspath()
try:
conf.find_program('webpack', path_list=[webpack_path])
except conf.errors.ConfigurationError:
pass # Error will be caught after checking for enableMultiJS setting
else:
Logs.pprint('YELLOW', "WARNING: Unable to find $NODE_PATH variable required for SDK "
"build. Please verify this build was initiated with a recent "
"pebble-tool.")
maybe_import_internal(conf.env)
def build(bld):
"""
This method is invoked from the app or lib waftool with the `bld.load('pebble_sdk_common')`
call and sets up additional task generators for the SDK.
:param bld: the BuildContext object
:return: N/A
"""
# cached_env is set to a shallow copy of the current ConfigSet for this BuildContext
bld.env = bld.all_envs['']
bld.load('file_name_c_define')
# Process message keys
bld(features='message_keys')
cached_env = bld.env
for platform in bld.env.TARGET_PLATFORMS:
# bld.env is set to a shallow copy of the ConfigSet labeled <platform>
bld.env = bld.all_envs[platform]
# Create a build group (set of TaskGens) for <platform>
if bld.env.USE_GROUPS:
bld.add_group(bld.env.PLATFORM_NAME)
# Generate a linker script specific to the current platform
build_node = bld.path.get_bld().find_or_declare(bld.env.BUILD_DIR)
bld(features='subst',
source=find_sdk_component(bld, bld.env, 'pebble_app.ld.template'),
target=build_node.make_node('pebble_app.ld.auto'),
**bld.env.PLATFORM)
# Locate Rocky JS tooling script
js_tooling_script = find_sdk_component(bld, bld.env, 'tools/generate_snapshot.js')
bld.env.JS_TOOLING_SCRIPT = js_tooling_script if js_tooling_script else None
# bld.env is set back to a shallow copy of the original ConfigSet that was set when this
# `build` method was invoked
bld.env = cached_env
# Create a build group for bundling (should run after the build groups for each platform)
if bld.env.USE_GROUPS:
bld.add_group('bundle')
def _wrap_c_preproc_scan(task):
"""
This function is a scanner that wraps c_preproc.scan to fix up pebble.h dependencies.
pebble.h is outside of the bld/src trees, so it's not considered a valid dependency
and isn't scanned for further dependencies. Normally this would be fine, but pebble.h
includes an auto-generated resource ID header which really is a dependency. We detect
this include and add the resource ID header file to the nodes being scanned by c_preproc.
:param task: the task instance
:return: N/A
"""
(nodes, names) = c_preproc.scan(task)
if 'pebble.h' in names:
nodes.append(get_node_from_abspath(task.generator.bld, task.env.RESOURCE_ID_HEADER))
nodes.append(get_node_from_abspath(task.generator.bld, task.env.MESSAGE_KEYS_HEADER))
return nodes, names
@feature('c')
@before_method('process_source')
def setup_pebble_c(task_gen):
"""
This method is called before all of the c aliases (objects, shlib, stlib, program, etc) and
ensures that the SDK `include` path for the current platform, as well as the project root
directory and the project src directory are included as header search paths (includes) for the
build.
:param task_gen: the task generator instance
:return: N/A
"""
platform = task_gen.env.PLATFORM_NAME
append_to_attr(task_gen, 'includes',
[find_sdk_component(task_gen.bld, task_gen.env, 'include'),
'.', 'include', 'src'])
append_to_attr(task_gen, 'includes', platform)
for lib in task_gen.bld.env.LIB_JSON:
if 'pebble' in lib:
lib_include_node = task_gen.bld.path.find_node(lib['path']).find_node('include')
append_to_attr(task_gen, 'includes',
[lib_include_node,
lib_include_node.find_node(str(lib['name'])).find_node(platform)])
@feature('c')
@after_method('process_source')
def fix_pebble_h_dependencies(task_gen):
"""
This method is called after `process_source` has run for the task generator and ensures
that `_wrap_c_preproc_scan` is used as the dependency scanner for all c tasks.
:param task_gen: the task generator instance
:return: N/A
"""
for task in task_gen.tasks:
if type(task) == c.c:
# Swap out the bound member function for our own
task.scan = types.MethodType(_wrap_c_preproc_scan, task, c.c)
@feature('pebble_cprogram')
@before_method('process_source')
def setup_pebble_cprogram(task_gen):
"""
This method is called before all of the c aliases (objects, shlib, stlib, program, etc) and
adds the appinfo.auto.c file to the source file list, adds the SDK pebble library to the lib
path for the build, sets the linkflags for the build, and specifies the linker script to
use during the linking step.
:param task_gen: the task generator instance
:return: None
"""
build_node = task_gen.path.get_bld().make_node(task_gen.env.BUILD_DIR)
platform = task_gen.env.PLATFORM_NAME
if not hasattr(task_gen, 'bin_type') or getattr(task_gen, 'bin_type') != 'lib':
append_to_attr(task_gen, 'source', build_node.make_node('appinfo.auto.c'))
append_to_attr(task_gen, 'source', build_node.make_node('src/resource_ids.auto.c'))
if task_gen.env.MESSAGE_KEYS:
append_to_attr(task_gen,
'source',
get_node_from_abspath(task_gen.bld,
task_gen.env.MESSAGE_KEYS_DEFINITION))
append_to_attr(task_gen, 'stlibpath',
find_sdk_component(task_gen.bld, task_gen.env, 'lib').abspath())
append_to_attr(task_gen, 'stlib', 'pebble')
for lib in task_gen.bld.env.LIB_JSON:
# Skip binary check for non-Pebble libs
if not 'pebble' in lib:
continue
binaries_path = task_gen.bld.path.find_node(lib['path']).find_node('binaries')
if binaries_path:
# Check for existence of platform folders inside binaries folder
platform_binary_path = binaries_path.find_node(platform)
if not platform_binary_path:
task_gen.bld.fatal("Library {} is missing the {} platform folder in {}".
format(lib['name'], platform, binaries_path))
# Check for existence of binary for each platform
if lib['name'].startswith('@'):
scoped_name = lib['name'].rsplit('/', 1)
lib_binary = (platform_binary_path.find_node(str(scoped_name[0])).
find_node("lib{}.a".format(scoped_name[1])))
else:
lib_binary = platform_binary_path.find_node("lib{}.a".format(lib['name']))
if not lib_binary:
task_gen.bld.fatal("Library {} is missing a binary for the {} platform".
format(lib['name'], platform))
# Link library binary (supports scoped names)
if lib['name'].startswith('@'):
append_to_attr(task_gen, 'stlibpath',
platform_binary_path.find_node(str(scoped_name[0])).abspath())
append_to_attr(task_gen, 'stlib', scoped_name[1])
else:
append_to_attr(task_gen, 'stlibpath', platform_binary_path.abspath())
append_to_attr(task_gen, 'stlib', lib['name'])
append_to_attr(task_gen, 'linkflags',
['-Wl,--build-id=sha1',
'-Wl,-Map,pebble-{}.map,--emit-relocs'.format(getattr(task_gen,
'bin_type',
'app'))])
if not hasattr(task_gen, 'ldscript'):
task_gen.ldscript = (
build_node.find_or_declare('pebble_app.ld.auto').path_from(task_gen.path))
def _get_entry_point(ctx, js_type, waf_js_entry_point):
"""
Returns the appropriate JS entry point, extracted from a project's package.json file,
wscript or common SDK default
:param ctx: the BuildContext
:param js_type: type of JS build, pkjs or rockyjs
:param waf_js_entry_point: the JS entry point specified by waftools
:return: the JS entry point for the bundled JS file
"""
fallback_entry_point = waf_js_entry_point
if not fallback_entry_point:
if js_type == 'pkjs':
if ctx.path.find_node('src/pkjs/index.js'):
fallback_entry_point = 'src/pkjs/index.js'
else:
fallback_entry_point = 'src/js/app.js'
if js_type == 'rockyjs':
fallback_entry_point = 'src/rocky/index.js'
project_info = ctx.env.PROJECT_INFO
if not project_info.get('main'):
return fallback_entry_point
if project_info['main'].get(js_type):
return str(project_info['main'][js_type])
return fallback_entry_point
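
For reference, the package.json override consulted here lives inside the "pebble" object
(hypothetical paths):

    "pebble": {
        "main": {
            "pkjs": "src/pkjs/custom-entry.js",
            "rockyjs": "src/rocky/custom-entry.js"
        }
    }
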
@conf
def pbl_bundle(self, *k, **kw):
"""
This method is bound to the build context and is called by specifying `bld.pbl_bundle()`. We
set the custom features `js` and `bundle` to run when this method is invoked.
:param self: the BuildContext object
:param k: none expected
:param kw:
binaries - a list containing dictionaries specifying the HW platform targeted by the
binary built, the app binary, and an optional worker binary
js - the source JS files to be bundled
js_entry_file - an optional parameter to specify the entry JS file when
enableMultiJS is set to 'true'
:return: a task generator instance with keyword arguments specified
"""
if kw.get('bin_type', 'app') == 'lib':
kw['features'] = 'headers js package'
else:
if self.env.BUILD_TYPE == 'rocky':
kw['js_entry_file'] = _get_entry_point(self, 'pkjs', kw.get('js_entry_file'))
kw['features'] = 'js bundle'
return self(*k, **kw)
@conf
def pbl_build(self, *k, **kw):
"""
This method is bound to the build context and is called by specifying `bld.pbl_build()`. We
set the custom features `c`, `cprogram` and `pebble_cprogram` to run when this method is
invoked. This method is intended to someday replace `pbl_program` and `pbl_worker` so that
all apps, workers, and libs will run through this method.
:param self: the BuildContext object
:param k: none expected
:param kw:
source - the source C files to be built and linked
target - the destination binary file for the compiled source
:return: a task generator instance with keyword arguments specified
"""
valid_bin_types = ('app', 'worker', 'lib', 'rocky')
bin_type = kw.get('bin_type', None)
if bin_type not in valid_bin_types:
self.fatal("The pbl_build method requires that a valid bin_type attribute be specified. "
"Valid options are {}".format(valid_bin_types))
if bin_type == 'rocky':
kw['features'] = 'c cprogram pebble_cprogram memory_usage'
elif bin_type in ('app', 'worker'):
kw['features'] = 'c cprogram pebble_cprogram memory_usage'
kw[bin_type] = kw['target']
elif bin_type == 'lib':
kw['features'] = 'c cstlib memory_usage'
path, name = kw['target'].rsplit('/', 1)
kw['lib'] = self.path.find_or_declare(path).make_node("lib{}.a".format(name))
# Pass values needed for memory usage report
if bin_type != 'worker':
kw['resources'] = (
self.env.PROJECT_RESBALL if bin_type == 'lib' else
self.path.find_or_declare(self.env.BUILD_DIR).make_node('app_resources.pbpack'))
return self(*k, **kw)
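
A sketch of the supported invocations (the target paths are illustrative):

    # App binary
    bld.pbl_build(source=bld.path.ant_glob('src/c/**/*.c'),
                  target='{}/pebble-app.elf'.format(bld.env.BUILD_DIR),
                  bin_type='app')

    # Background worker
    bld.pbl_build(source=bld.path.ant_glob('worker_src/**/*.c'),
                  target='{}/pebble-worker.elf'.format(bld.env.BUILD_DIR),
                  bin_type='worker')

    # Library: the target is split at the last '/' into path and name,
    # producing lib<name>.a under that path
    bld.pbl_build(source=bld.path.ant_glob('src/c/**/*.c'),
                  target='{}/my-lib'.format(bld.env.BUILD_DIR),
                  bin_type='lib')
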
@conf
def pbl_js_build(self, *k, **kw):
"""
This method is bound to the build context and is called by specifying `bld.pbl_js_build()`.
When this method is invoked, we set the custom feature `rockyjs` to run, which handles
processing of JS files in preparation for Rocky.js bytecode compilation (this actually
happens during resource generation)
:param self: the BuildContext object
:param k: none expected
:param kw:
source - the source JS files that will eventually be compiled into bytecode
target - the destination JS file that will be specified as the source file for the
bytecode compilation process
:return: a task generator instance with keyword arguments specified
"""
kw['js_entry_file'] = _get_entry_point(self, 'rockyjs', kw.get('js_entry_file'))
kw['features'] = 'rockyjs'
return self(*k, **kw)

sdk/waftools/pebble_sdk_lib.py Normal file
@@ -0,0 +1,123 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import sdk_paths
from process_sdk_resources import generate_resources
from sdk_helpers import (configure_libraries, configure_platform, get_target_platforms,
validate_message_keys_object)
def options(opt):
"""
Specify the options available when invoking waf; uses OptParse
:param opt: the OptionContext object
:return: N/A
"""
opt.load('pebble_sdk_common')
opt.add_option('-t', '--timestamp', dest='timestamp',
help="Use a specific timestamp to label this package (ie, your repository's last commit time), "
"defaults to time of build")
def configure(conf):
"""
Configure the build using information obtained from the package.json file
:param conf: the ConfigureContext object
:return: N/A
"""
conf.load('pebble_sdk_common')
# This overrides the default config in pebble_sdk_common.py
if conf.options.timestamp:
conf.env.TIMESTAMP = conf.options.timestamp
conf.env.BUNDLE_NAME = "dist.zip"
package_json_node = conf.path.get_src().find_node('package.json')
if package_json_node is None:
conf.fatal('Could not find package.json')
with open(package_json_node.abspath(), 'r') as f:
package_json = json.load(f)
# Extract project info from "pebble" object in package.json
project_info = package_json['pebble']
project_info['name'] = package_json['name']
validate_message_keys_object(conf, project_info, 'package.json')
conf.env.PROJECT_INFO = project_info
conf.env.BUILD_TYPE = 'lib'
conf.env.REQUESTED_PLATFORMS = project_info.get('targetPlatforms', [])
conf.env.LIB_DIR = "node_modules"
get_target_platforms(conf)
# With new-style projects, check for libraries specified in package.json
if 'dependencies' in package_json:
configure_libraries(conf, package_json['dependencies'])
conf.load('process_message_keys')
if 'resources' in project_info and 'media' in project_info['resources']:
conf.env.RESOURCES_JSON = package_json['pebble']['resources']['media']
# base_env is set to a shallow copy of the current ConfigSet for this ConfigureContext
base_env = conf.env
for platform in conf.env.TARGET_PLATFORMS:
# Create a deep copy of the `base_env` ConfigSet and set conf.env to a shallow copy of
# the resultant ConfigSet
conf.setenv(platform, base_env)
configure_platform(conf, platform)
# conf.env is set back to a shallow copy of the default ConfigSet stored in conf.all_envs['']
conf.setenv('')
def build(bld):
"""
This method is invoked from a project's wscript with the `ctx.load('pebble_sdk_lib')` call
and sets up all of the task generators for the SDK. After all of the build methods have run,
the configured task generators will run, generating build tasks and managing dependencies. See
https://waf.io/book/#_task_generators for more details on task generator setup.
:param bld: the BuildContext object
:return: N/A
"""
bld.load('pebble_sdk_common')
# cached_env is set to a shallow copy of the current ConfigSet for this BuildContext
cached_env = bld.env
for platform in bld.env.TARGET_PLATFORMS:
# bld.env is set to a shallow copy of the ConfigSet labeled <platform>
bld.env = bld.all_envs[platform]
# Set the build group (set of TaskGens) to the group labeled <platform>
if bld.env.USE_GROUPS:
bld.set_group(bld.env.PLATFORM_NAME)
# Generate resources specific to the current platform
resource_path = None
if bld.env.RESOURCES_JSON:
try:
resource_path = bld.path.find_node('src').find_node('resources').path_from(bld.path)
except AttributeError:
bld.fatal("Unable to locate resources at src/resources/")
generate_resources(bld, resource_path)
# bld.env is set back to a shallow copy of the original ConfigSet that was set when this `build`
# method was invoked
bld.env = cached_env

sdk/waftools/process_bundle.py Normal file
@@ -0,0 +1,209 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from waflib import Task
from waflib.TaskGen import feature
import mkbundle
from pebble_package import LibraryPackage
from process_elf import generate_bin_file
from resources.types.resource_ball import ResourceBall
@Task.update_outputs
class lib_package(Task.Task):
"""
Task class to generate a library bundle for distribution
"""
def run(self):
"""
This method executes when the package task runs
:return: N/A
"""
bld = self.generator.bld
build_dir = bld.bldnode
includes = {include.path_from(build_dir.find_node('include')): include.abspath()
for include in getattr(self, 'includes', [])}
binaries = {binary.path_from(build_dir): binary.abspath()
for binary in getattr(self, 'binaries', [])}
js = {js.path_from(build_dir.find_node('js')): js.abspath()
for js in getattr(self, 'js', [])}
resource_definitions = []
for resball in getattr(self, 'resources', []):
resource_definitions.extend(ResourceBall.load(resball.abspath()).get_all_declarations())
reso_list = []
for definition in resource_definitions:
if definition.target_platforms:
platforms = list(set(definition.target_platforms) & set(bld.env.TARGET_PLATFORMS))
else:
platforms = bld.env.TARGET_PLATFORMS
for platform in platforms:
platform_path = build_dir.find_node(bld.all_envs[platform].BUILD_DIR).relpath()
reso_list.append(build_dir.find_node("{}.{}.reso".format(
os.path.join(platform_path,
bld.path.find_node(definition.sources[0]).relpath()),
str(definition.name)
)))
resources = {
os.path.join(resource.path_from(build_dir).split('/', 1)[0],
resource.path_from(build_dir).split('/', 3)[3]): resource.abspath()
for resource in reso_list}
package = LibraryPackage(self.outputs[0].abspath())
package.add_files(includes=includes, binaries=binaries, resources=resources, js=js)
package.pack()
@Task.update_outputs
class app_bundle(Task.Task):
"""
Task class to generate an app bundle for distribution
"""
def run(self):
"""
This method executes when the bundle task runs
:return: N/A
"""
binaries = getattr(self, 'bin_files')
js_files = getattr(self, 'js_files')
outfile = self.outputs[0].abspath()
mkbundle.make_watchapp_bundle(
timestamp=self.generator.bld.env.TIMESTAMP,
appinfo=self.generator.bld.path.get_bld().find_node('appinfo.json').abspath(),
binaries=binaries,
js=[js_file.abspath() for js_file in js_files],
outfile=outfile
)
@feature('package')
def make_lib_bundle(task_gen):
"""
Bundle the build artifacts into a distributable library package.
Keyword arguments:
js -- A list of JavaScript files to package into the resulting bundle
includes -- A list of header files to package into the library bundle
:param task_gen: the task generator instance
:return: None
"""
js = task_gen.to_nodes(getattr(task_gen, 'js', []))
includes = task_gen.to_nodes(getattr(task_gen, 'includes', []))
resources = []
binaries = []
for platform in task_gen.bld.env.TARGET_PLATFORMS:
bld_dir = task_gen.path.get_bld().find_or_declare(platform)
env = task_gen.bld.all_envs[platform]
resources.append(getattr(env, 'PROJECT_RESBALL'))
project_name = env.PROJECT_INFO['name']
if project_name.startswith('@'):
scoped_name = project_name.rsplit('/', 1)
binaries.append(
bld_dir.find_or_declare(str(scoped_name[0])).
find_or_declare("lib{}.a".format(scoped_name[1])))
else:
binaries.append(bld_dir.find_or_declare("lib{}.a".format(project_name)))
task = task_gen.create_task('lib_package',
[],
task_gen.bld.path.make_node(task_gen.bld.env.BUNDLE_NAME))
task.js = js
task.includes = includes
task.resources = resources
task.binaries = binaries
task.dep_nodes = js + includes + resources + binaries
# PBL-40925 Use pebble_package.py instead of mkbundle.py
@feature('bundle')
def make_pbl_bundle(task_gen):
"""
Bundle the build artifacts into a distributable package.
Keyword arguments:
js -- A list of JavaScript files to package into the resulting bundle
binaries -- A list of the binaries for each platform to include in the bundle
:param task_gen: the task generator instance
:return: None
"""
bin_files = []
bundle_sources = []
js_files = getattr(task_gen, 'js', [])
has_pkjs = bool(getattr(task_gen, 'js', False))
if has_pkjs:
bundle_sources.extend(task_gen.to_nodes(task_gen.js))
cached_env = task_gen.bld.env
if hasattr(task_gen, 'bin_type') and task_gen.bin_type == 'rocky':
binaries = []
for platform in task_gen.bld.env.TARGET_PLATFORMS:
binaries.append({"platform": platform,
"app_elf": "{}/pebble-app.elf".format(
task_gen.bld.all_envs[platform].BUILD_DIR)})
rocky_source_node = task_gen.bld.path.get_bld().make_node('resources/rocky-app.js')
js_files.append(rocky_source_node)
bundle_sources.append(rocky_source_node)
else:
binaries = task_gen.binaries
for binary in binaries:
task_gen.bld.env = task_gen.bld.all_envs[binary['platform']]
platform_build_node = task_gen.bld.path.find_or_declare(task_gen.bld.env.BUILD_DIR)
app_elf_file = task_gen.bld.path.get_bld().make_node(binary['app_elf'])
if app_elf_file is None:
raise Exception("Must specify elf argument to bundle")
worker_bin_file = None
if 'worker_elf' in binary:
worker_elf_file = task_gen.bld.path.get_bld().make_node(binary['worker_elf'])
app_bin_file = generate_bin_file(task_gen, 'app', app_elf_file, has_pkjs,
has_worker=True)
worker_bin_file = generate_bin_file(task_gen, 'worker', worker_elf_file, has_pkjs,
has_worker=True)
bundle_sources.append(worker_bin_file)
else:
app_bin_file = generate_bin_file(task_gen, 'app', app_elf_file, has_pkjs,
has_worker=False)
resources_pack = platform_build_node.make_node('app_resources.pbpack')
bundle_sources.extend([app_bin_file, resources_pack])
bin_files.append({'watchapp': app_bin_file.abspath(),
'resources': resources_pack.abspath(),
'worker_bin': worker_bin_file.abspath() if worker_bin_file else None,
'sdk_version': {'major': task_gen.bld.env.SDK_VERSION_MAJOR,
'minor': task_gen.bld.env.SDK_VERSION_MINOR},
'subfolder': task_gen.bld.env.BUNDLE_BIN_DIR})
task_gen.bld.env = cached_env
bundle_output = task_gen.bld.path.get_bld().make_node(task_gen.bld.env.BUNDLE_NAME)
task = task_gen.create_task('app_bundle', [], bundle_output)
task.bin_files = bin_files
task.js_files = js_files
task.dep_nodes = bundle_sources

sdk/waftools/process_elf.py Normal file
@@ -0,0 +1,50 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import objcopy
import pebble_sdk_gcc
# TODO: PBL-33841 Make this a feature
def generate_bin_file(task_gen, bin_type, elf_file, has_pkjs, has_worker):
"""
Generate bin file by injecting metadata from elf file and resources file
:param task_gen: the task generator instance
:param bin_type: the type of binary being built (app, worker, lib)
:param elf_file: the path to the compiled elf file
:param has_pkjs: boolean for whether the build contains PebbleKit JS files
:param has_worker: boolean for whether the build contains a worker binary
:return: the modified binary file with injected metadata
"""
platform_build_node = task_gen.bld.path.get_bld().find_node(task_gen.bld.env.BUILD_DIR)
packaged_files = [elf_file]
resources_file = None
if bin_type != 'worker':
resources_file = platform_build_node.find_or_declare('app_resources.pbpack')
packaged_files.append(resources_file)
raw_bin_file = platform_build_node.make_node('pebble-{}.raw.bin'.format(bin_type))
bin_file = platform_build_node.make_node('pebble-{}.bin'.format(bin_type))
task_gen.bld(rule=objcopy.objcopy_bin, source=elf_file, target=raw_bin_file)
pebble_sdk_gcc.gen_inject_metadata_rule(task_gen.bld,
src_bin_file=raw_bin_file,
dst_bin_file=bin_file,
elf_file=elf_file,
resource_file=resources_file,
timestamp=task_gen.bld.env.TIMESTAMP,
has_pkjs=has_pkjs,
has_worker=has_worker)
return bin_file
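
The resulting artifact chain for an app build, per the rules set up above (a sketch; the
file names come from the code):

    pebble-app.elf        linked build output
    pebble-app.raw.bin    objcopy_bin strips the ELF container
    pebble-app.bin        metadata injected from the elf file and app_resources.pbpack
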

sdk/waftools/process_headers.py Normal file
@@ -0,0 +1,83 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from waflib.TaskGen import before_method, feature
from waflib import Context, Task
from sdk_helpers import get_node_from_abspath
@feature('headers')
@before_method('make_lib_bundle')
def process_headers(task_gen):
"""
Process all of the headers specified in the wscript file, as well as the headers generated
during the build process for the resource ids and message keys, as needed.
Keyword arguments:
includes -- A list of header files to copy
:param task_gen: the task generator instance
:return: None
"""
header_nodes = task_gen.to_nodes(task_gen.includes)
for platform in task_gen.env.TARGET_PLATFORMS:
env = task_gen.bld.all_envs[platform]
header_nodes.append(get_node_from_abspath(task_gen.bld, env['RESOURCE_ID_HEADER']))
# Add .h file containing app message keys
if 'MESSAGE_KEYS_HEADER' in dict(task_gen.env):
header_nodes.append(
get_node_from_abspath(task_gen.bld, task_gen.env['MESSAGE_KEYS_HEADER']))
# Copy header files to build/include/<libname> to provide naming collision protection in
# #includes
lib_name = str(task_gen.env.PROJECT_INFO['name'])
lib_include_node = task_gen.bld.path.get_bld().make_node('include').make_node(lib_name)
target_nodes = []
for header in header_nodes:
base_node = (task_gen.bld.path.get_bld() if header.is_child_of(task_gen.bld.path.get_bld())
else task_gen.bld.path)
if header.is_child_of(base_node.find_node('include')):
header_path = header.path_from(base_node.find_node('include'))
else:
header_path = header.path_from(base_node)
target_node = lib_include_node.make_node(header_path)
target_node.parent.mkdir()
target_nodes.append(target_node)
task_gen.includes = target_nodes
task_gen.create_task('copy_headers', src=header_nodes, tgt=target_nodes)
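
As an illustration of this staging, a library named my-lib shipping include/my-lib.h would
end up with roughly the following layout (hypothetical names):

    build/include/my-lib/my-lib.h               header listed in `includes`
    build/include/my-lib/message_keys.auto.h    generated message keys header

so that consumers can write `#include <my-lib/my-lib.h>` without colliding with headers
from other libraries.
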
@Task.update_outputs
class copy_headers(Task.Task):
"""
Task class to copy specified headers from a source location to a target location
"""
def run(self):
"""
This method executes when the copy headers task runs
:return: N/A
"""
bld = self.generator.bld
if len(self.inputs) != len(self.outputs):
bld.fatal("Number of input headers ({}) does not match number of target headers ({})".
format(len(self.inputs), len(self.outputs)))
for i in range(len(self.inputs)):
bld.cmd_and_log('cp "{src}" "{tgt}"'.
format(src=self.inputs[i].abspath(), tgt=self.outputs[i].abspath()),
quiet=Context.BOTH)

sdk/waftools/process_js.py Normal file
@@ -0,0 +1,266 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import subprocess
from string import Template
from waflib.Errors import WafError
from waflib.TaskGen import before_method, feature
from waflib import Context, Logs, Node, Task
from sdk_helpers import find_sdk_component, get_node_from_abspath
from sdk_helpers import process_package
@feature('rockyjs')
@before_method('process_sdk_resources')
def process_rocky_js(task_gen):
"""
Lint the JS source files using a Rocky-specific linter
Keyword arguments:
js -- a list of JS files to process for the build
:param task_gen: the task generator instance
:return: N/A
"""
bld = task_gen.bld
task_gen.mappings = {'': (lambda task_gen, node: None)}
js_nodes = task_gen.to_nodes(task_gen.source)
target = task_gen.to_nodes(task_gen.target)
if not js_nodes:
task_gen.bld.fatal("Project does not contain any source code.")
js_nodes.append(find_sdk_component(bld, task_gen.env, 'include/rocky.js'))
# This locates the available node_modules folders and performs a search for the rocky-lint
# module. This code remains in this file un-abstracted because similar functionality is not yet
# needed elsewhere.
node_modules = []
rocky_linter = None
if bld.path.find_node('node_modules'):
node_modules.append(bld.path.find_node('node_modules'))
if bld.env.NODE_PATH:
node_modules.append(bld.root.find_node(bld.env.NODE_PATH))
for node_modules_node in node_modules:
rocky_linter = node_modules_node.ant_glob('rocky-lint/**/rocky-lint.js')
if rocky_linter:
rocky_linter = rocky_linter[0]
break
rocky_definitions = find_sdk_component(bld, task_gen.env, 'tools/rocky-lint/rocky.d.ts')
if rocky_linter and rocky_definitions:
lintable_nodes = [node for node in js_nodes if node.is_child_of(bld.path)]
lint_task = task_gen.create_task('lint_js', src=lintable_nodes)
lint_task.linter = [task_gen.env.NODE,
rocky_linter.path_from(bld.path),
'-d',
rocky_definitions.path_from(bld.path)]
else:
Logs.pprint('YELLOW', "Rocky JS linter not present - skipping lint task")
# Create JS merge task for Rocky.js files
merge_task = task_gen.create_task('merge_js', src=js_nodes, tgt=target)
merge_task.js_entry_file = task_gen.js_entry_file
merge_task.js_build_type = 'rocky'
@feature('js')
@before_method('make_pbl_bundle', 'make_lib_bundle')
def process_js(task_gen):
"""
Merge the JS source files into a single JS file if enableMultiJS is set to 'true'; otherwise,
copy the JS source files into the build tree
Keyword arguments:
js -- A list of JS files to process for the build
:param task_gen: the task generator instance
:return: N/A
"""
# Skip JS handling if there are no JS files
js_nodes = task_gen.to_nodes(getattr(task_gen, 'js', []))
if not js_nodes:
return
# Create JS merge task if the project specifies "enableMultiJS: true"
if task_gen.env.PROJECT_INFO.get('enableMultiJS', False):
target_js = task_gen.bld.path.get_bld().make_node('pebble-js-app.js')
target_js_map = target_js.change_ext('.js.map')
task_gen.js = [target_js, target_js_map]
merge_task = task_gen.create_task('merge_js', src=js_nodes, tgt=[target_js, target_js_map])
merge_task.js_entry_file = task_gen.js_entry_file
merge_task.js_build_type = 'pkjs'
merge_task.js_source_map_config = {
'sourceMapFilename': target_js_map.name
}
return
# Check for pebble-js-app.js if developer does not specify "enableMultiJS: true" in
# the project
if task_gen.env.BUILD_TYPE != 'lib':
for node in js_nodes:
if 'pebble-js-app.js' in node.abspath():
break
else:
Logs.pprint("CYAN",
"WARNING: enableMultiJS is not enabled for this project and "
"pebble-js-app.js does not exist")
# For apps without multiJS enabled, and for libs, copy JS files from the src folder to the
# build folder, skipping any files already in the build folder
js_nodes_to_copy = [js_node for js_node in js_nodes if not js_node.is_bld()]
if not js_nodes_to_copy:
task_gen.js = js_nodes
return
target_nodes = []
for js in js_nodes_to_copy:
if js.is_child_of(task_gen.bld.path.find_node('src')):
js_path = js.path_from(task_gen.bld.path.find_node('src'))
else:
js_path = os.path.abspath(js.path_from(task_gen.bld.path))
target_node = task_gen.bld.path.get_bld().make_node(js_path)
target_node.parent.mkdir()
target_nodes.append(target_node)
task_gen.js = target_nodes + list(set(js_nodes) - set(js_nodes_to_copy))
task_gen.create_task('copy_js', src=js_nodes_to_copy, tgt=target_nodes)
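
For orientation, the two project shapes this method handles (illustrative trees):

    # enableMultiJS: true -- sources are webpack-merged into one file
    src/pkjs/index.js         ->  build/pebble-js-app.js (+ pebble-js-app.js.map)

    # enableMultiJS absent or false -- sources are copied into the build tree
    src/js/pebble-js-app.js   ->  build/js/pebble-js-app.js
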
class copy_js(Task.Task):
"""
Task class for copying source JS files to a target location
"""
def run(self):
"""
This method executes when the JS copy task runs
:return: N/A
"""
bld = self.generator.bld
if len(self.inputs) != len(self.outputs):
bld.fatal("Number of input JS files ({}) does not match number of target JS files ({})".
format(len(self.inputs), len(self.outputs)))
for i in range(len(self.inputs)):
bld.cmd_and_log('cp "{src}" "{tgt}"'.
format(src=self.inputs[i].abspath(), tgt=self.outputs[i].abspath()),
quiet=Context.BOTH)
class merge_js(Task.Task):
"""
Task class for merging all specified JS files into one `pebble-js-app.js` file
"""
def run(self):
"""
This method executes when the JS merge task runs
:return: N/A
"""
bld = self.generator.bld
js_build_type = getattr(self, 'js_build_type')
# Check for a valid JS entry point among JS files
js_nodes = self.inputs
entry_point = bld.path.find_resource(self.js_entry_file)
if entry_point not in js_nodes:
bld.fatal("\n\nJS entry file '{}' not found in JS source files '{}'. We expect to find "
"a javascript file here that we will execute directly when your app launches."
"\n\nIf you are an advanced user, you can supply the 'js_entry_file' "
"parameter to 'pbl_bundle' in your wscript to change the default entry point."
" Note that doing this will break CloudPebble compatibility.".
format(self.js_entry_file, js_nodes))
target_js = self.outputs[0]
entry = [
entry_point.abspath()
]
if js_build_type == 'pkjs':
# NOTE: The order is critical here.
# _pkjs_shared_additions.js MUST be the first in the `entry` array!
entry.insert(0, "_pkjs_shared_additions.js")
if self.env.BUILD_TYPE == 'rocky':
entry.insert(1, "_pkjs_message_wrapper.js")
common_node = bld.root.find_node(self.generator.env.PEBBLE_SDK_COMMON)
tools_webpack_node = common_node.find_node('tools').find_node('webpack')
webpack_config_template_node = tools_webpack_node.find_node('webpack-config.js.pytemplate')
with open(webpack_config_template_node.abspath()) as f:
webpack_config_template_content = f.read()
search_paths = [
common_node.find_node('include').abspath(),
tools_webpack_node.abspath(),
bld.root.find_node(self.generator.env.NODE_PATH).abspath(),
bld.path.get_bld().make_node('js').abspath()
]
pebble_packages = [str(lib['name']) for lib in bld.env.LIB_JSON if 'pebble' in lib]
aliases = {lib: "{}/dist/js".format(lib) for lib in pebble_packages}
info_json_file = bld.path.find_node('package.json') or bld.path.find_node('appinfo.json')
if info_json_file:
aliases.update({'app_package.json': info_json_file.abspath()})
config_file = (
bld.path.get_bld().make_node("webpack/{}/webpack.config.js".format(js_build_type)))
config_file.parent.mkdir()
with open(config_file.abspath(), 'w') as f:
m = {
'IS_SANDBOX': bool(self.env.SANDBOX),
'ENTRY_FILENAMES': entry,
'OUTPUT_PATH': target_js.parent.path_from(bld.path),
'OUTPUT_FILENAME': target_js.name,
'RESOLVE_ROOTS': search_paths,
'RESOLVE_ALIASES': aliases,
'SOURCE_MAP_CONFIG': getattr(self, 'js_source_map_config', None)
}
f.write(Template(webpack_config_template_content).substitute(
{k: json.dumps(m[k], separators=(',\n',': ')) for k in m }))
cmd = (
"'{webpack}' --config {config} --display-modules".
format(webpack=self.generator.env.WEBPACK, config=config_file.path_from(bld.path)))
try:
out = bld.cmd_and_log(cmd, quiet=Context.BOTH, output=Context.STDOUT)
except WafError as e:
bld.fatal("JS bundling failed\n{}\n{}".format(e.stdout, e.stderr))
else:
if self.env.VERBOSE > 0:
Logs.pprint('WHITE', out)
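
A minimal sketch of the templating step above (the placeholder name matches the `m` dict;
the template text itself is invented here):

    from string import Template
    template = Template("module.exports = { entry: ${ENTRY_FILENAMES} };")
    rendered = template.substitute(
        {'ENTRY_FILENAMES': json.dumps(['_pkjs_shared_additions.js', 'src/pkjs/index.js'])})
    # rendered == 'module.exports = { entry: ["_pkjs_shared_additions.js", "src/pkjs/index.js"] };'
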
class lint_js(Task.Task):
"""
Task class for linting JS source files with a specified linter script.
"""
def run(self):
"""
This method executes when the JS lint task runs
:return: N/A
"""
self.name = 'lint_js'
js_nodes = self.inputs
for js_node in js_nodes:
cmd = self.linter + [js_node.path_from(self.generator.bld.path)]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
if err:
Logs.pprint('CYAN', "\n========== Lint Results: {} ==========\n".format(js_node))
Logs.pprint('WHITE', "{}\n{}\n".format(out, err))
if proc.returncode != 0:
self.generator.bld.fatal("Project failed linting.")

sdk/waftools/process_message_keys.py Normal file
@@ -0,0 +1,229 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from re import findall
from waflib.TaskGen import before_method, feature
from waflib import Logs, Task
from sdk_helpers import get_node_from_abspath
header = (
"""#pragma once
#include <stdint.h>
//
// AUTOGENERATED BY BUILD
// DO NOT MODIFY - CHANGES WILL BE OVERWRITTEN
//
""")
definitions_file = (
"""
#include <stdint.h>
//
// AUTOGENERATED BY BUILD
// DO NOT MODIFY - CHANGES WILL BE OVERWRITTEN
//
""")
def configure(conf):
"""
Configure the build by collecting all of the project's appKeys, as well as the appKeys of any
dependencies, and writing them out to a header file and a JSON file for use in the project
:param conf: the ConfigureContext
:return: N/A
"""
if conf.env.BUILD_TYPE != 'lib':
if not dict(conf.env.PROJECT_INFO).get('enableMultiJS', False):
Logs.pprint("CYAN",
"WARNING: enableMultiJS is not enabled for this project. message_keys.json "
"will not be included in your project unless you add it to your "
"pebble-js-app.js file.")
keys = conf.env.PROJECT_INFO.get('messageKeys', conf.env.PROJECT_INFO.get('appKeys', []))
if conf.env.BUILD_TYPE == 'rocky':
if keys:
conf.fatal("Custom messageKeys are not supported for Rocky.js projects. Please "
"remove any messageKeys listed in your package.json file.")
else:
keys = {
"ControlKeyResetRequest": 1,
"ControlKeyResetComplete": 2,
"ControlKeyChunk": 3,
"ControlKeyUnsupportedError": 4,
}
key_list = []
key_dict = {}
block_message_keys = []
if keys:
if isinstance(keys, list):
key_list = keys
elif isinstance(keys, dict):
if conf.env.BUILD_TYPE == 'lib':
conf.fatal("Libraries can only specify an array of messageKeys; other object types "
"are not supported.")
key_dict = keys
else:
conf.fatal("You have specified an invalid messageKeys object in your project JSON "
"file.")
combined_key_list = key_list + key_dict.keys()
for lib in conf.env.LIB_JSON:
if not 'pebble' in lib or not 'messageKeys' in lib['pebble']:
continue
lib_keys = lib['pebble']['messageKeys']
if isinstance(lib_keys, list):
for key in lib_keys:
if key in combined_key_list:
conf.fatal("The messageKey '{}' has already been used and cannot be re-used by "
"the {} library.".format(key, lib['name']))
combined_key_list.append(key)
key_list.extend(lib_keys)
else:
conf.fatal("'{}' has an invalid messageKeys object. "
"Libraries can only specify an messageKeys array.".format(lib['name']))
if key_list:
next_key = 10000
multi_keys = [key for key in key_list if ']' in key]
single_keys = [key for key in key_list if ']' not in key]
for key in multi_keys:
try:
key_name, num_keys = findall(r"([\w]+)\[(\d+)\]$", key)[0]
except IndexError:
suggested_key_name = key.split('[', 1)[0]
conf.fatal("An invalid message key of `{}` was specified. Verify that a valid "
"length is specified if you are trying to allocate an array of keys "
"with a single identifier. For example, try `{}[2]`.".
format(key, suggested_key_name))
else:
key_dict.update({key_name: next_key})
next_key += int(num_keys)
block_message_keys.append(key_name)
key_dict.update({value: key for key, value in enumerate(single_keys, start=next_key)})
conf.env.PROJECT_INFO['messageKeys'] = key_dict
conf.env.PROJECT_INFO['appKeys'] = key_dict # Support legacy appinfo.json generation
conf.env.MESSAGE_KEYS = key_dict
conf.env.BLOCK_MESSAGE_KEYS = block_message_keys
bld_dir = conf.path.get_bld()
conf.env.MESSAGE_KEYS_HEADER = bld_dir.make_node('include/message_keys.auto.h').abspath()
if key_dict:
conf.env.MESSAGE_KEYS_DEFINITION = bld_dir.make_node('src/message_keys.auto.c').abspath()
conf.env.MESSAGE_KEYS_JSON = bld_dir.make_node('js/message_keys.json').abspath()
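
A worked example of the allocation above: messageKeys of ["Temperature", "Readings[4]",
"Humidity"] expands the array key first, then numbers the remaining scalar keys from the
next free slot:

    {
        "Readings": 10000,      # occupies 10000-10003 (4 slots)
        "Temperature": 10004,
        "Humidity": 10005
    }

"Readings" also lands in BLOCK_MESSAGE_KEYS, so its entry is deleted from appKeys when
appinfo.json is written out (see _write_appinfo_json_file in pebble_sdk.py above).
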
@feature('message_keys')
@before_method('cprogram', 'process_js', 'process_headers')
def process_message_keys(task_gen):
"""
Create the appropriate message key output files for the type of build: only a header for a
library build; a header, a C definitions file, and a JSON file for an app build
:param task_gen: the task generator instance
:return: None
"""
message_keys = task_gen.env['MESSAGE_KEYS']
bld = task_gen.bld
# Create a header file that is included during lib/app builds
header_task = (
task_gen.create_task('message_key_header',
tgt=get_node_from_abspath(task_gen.bld,
getattr(task_gen.env,
'MESSAGE_KEYS_HEADER'))))
header_task.message_keys = message_keys
header_task.dep_vars = message_keys
if bld.env.BUILD_TYPE == 'lib' or not message_keys:
return
# Create a C file to satisfy any extern header files
definitions_task = (
task_gen.create_task('message_key_definitions',
tgt=get_node_from_abspath(task_gen.bld,
getattr(task_gen.env,
'MESSAGE_KEYS_DEFINITION'))))
definitions_task.message_keys = message_keys
definitions_task.dep_vars = message_keys
# Create a JSON file for apps to require
bld.path.get_bld().make_node('js').mkdir()
json_task = (
task_gen.create_task('message_key_json',
tgt=get_node_from_abspath(task_gen.bld,
getattr(task_gen.env, 'MESSAGE_KEYS_JSON'))))
json_task.message_keys = message_keys
json_task.dep_vars = message_keys
@Task.update_outputs
class message_key_header(Task.Task):
"""
Task class for creating a header file with the message key declarations for the project
"""
def run(self):
"""
This method executes when the message key header task runs
:return: N/A
"""
self.outputs[0].parent.mkdir()
with open(self.outputs[0].abspath(), 'w') as f:
f.write(header)
for key in sorted(self.message_keys):
f.write("extern uint32_t MESSAGE_KEY_{};\n".format(key))
@Task.update_outputs
class message_key_definitions(Task.Task):
"""
Task class for creating a C definitions file with the message key definitions for the project
"""
def run(self):
"""
This method executes when the message key definitions task runs
:return: N/A
"""
self.outputs[0].parent.mkdir()
with open(self.outputs[0].abspath(), 'w') as f:
f.write(definitions_file)
for k, v in sorted(self.message_keys.items(), key=lambda x: x[0]):
f.write("uint32_t MESSAGE_KEY_{} = {};\n".format(k, v))
@Task.update_outputs
class message_key_json(Task.Task):
"""
Task class for creating a JSON file with the message key definitions for the project
"""
def run(self):
"""
This method executes when the message key JSON task runs
:return: N/A
"""
self.outputs[0].parent.mkdir()
with open(self.outputs[0].abspath(), 'w') as f:
json.dump(self.message_keys, f, sort_keys=True, indent=4, separators=(',', ': '))
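# Output sketch (hypothetical key_dict of {'Temperature': 10004}, assuming the
# `header` and `definitions_file` boilerplate strings defined earlier in this
# file): the three tasks above would emit
#
#     message_keys.auto.h:  extern uint32_t MESSAGE_KEY_Temperature;
#     message_keys.auto.c:  uint32_t MESSAGE_KEY_Temperature = 10004;
#     message_keys.json:    {"Temperature": 10004}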

231
sdk/waftools/process_sdk_resources.py Normal file
View file

@ -0,0 +1,231 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
from waflib import Node
from resources.find_resource_filename import find_most_specific_filename
from resources.types.resource_definition import ResourceDefinition
from resources.types.resource_object import ResourceObject
from resources.resource_map import resource_generator
import resources.resource_map.resource_generator_bitmap
import resources.resource_map.resource_generator_font
import resources.resource_map.resource_generator_js
import resources.resource_map.resource_generator_pbi
import resources.resource_map.resource_generator_png
import resources.resource_map.resource_generator_raw
from sdk_helpers import is_sdk_2x, validate_resource_not_larger_than
def _preprocess_resource_ids(bld, resources_list, has_published_media=False):
"""
This method reads all of the defined resources for the project and assigns resource IDs to
them prior to the start of resource processing. This preprocessing step is necessary in order
for the timeline lookup table to contain accurate resource IDs, while still allowing us to
prepend the TLUT as a resource in the resource ball.
:param bld: the BuildContext object
:param resources_list: the list of resources defined for this project
:param has_published_media: boolean for whether publishedMedia exists for the project
:return: None
"""
resource_id_mapping = {}
next_id = 1
if has_published_media:
# The timeline lookup table must be the first resource if one exists
resource_id_mapping['TIMELINE_LUT'] = next_id
next_id += 1
for res_id, res in enumerate(resources_list, start=next_id):
if isinstance(res, Node.Node):
if res.name == 'timeline_resource_table.reso':
continue
res_name = ResourceObject.load(res.abspath()).definition.name
resource_id_mapping[res_name] = res_id
else:
resource_id_mapping[res.name] = res_id
bld.env.RESOURCE_ID_MAPPING = resource_id_mapping
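# Illustrative sketch (hypothetical resource names): for a project that defines
# resources IMAGE_A and FONT_B and uses publishedMedia, the mapping above is
#
#     {'TIMELINE_LUT': 1, 'IMAGE_A': 2, 'FONT_B': 3}
#
# matching the order in which the resources are later packed.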
def generate_resources(bld, resource_source_path):
"""
This method creates all of the task generators necessary to handle every possible resource
allowed by the SDK.
:param bld: the BuildContext object
:param resource_source_path: the path from which to retrieve resource files
:return: N/A
"""
resources_json = getattr(bld.env, 'RESOURCES_JSON', [])
published_media_json = getattr(bld.env, 'PUBLISHED_MEDIA_JSON', [])
if resource_source_path:
resources_node = bld.path.find_node(resource_source_path)
else:
resources_node = bld.path.find_node('resources')
resource_file_mapping = {}
for resource in resources_json:
resource_file_mapping[resource['name']] = (
find_most_specific_filename(bld, bld.env, resources_node, resource['file']))
# Load the waftools that handle creating resource objects, a resource pack and the resource
# ID header
bld.load('generate_pbpack generate_resource_ball generate_resource_id_header')
bld.load('process_timeline_resources')
# Iterate over the resource definitions and do some processing to remove resources that
# aren't relevant to the platform we're building for and to apply various backwards
# compatibility adjustments
resource_definitions = []
max_menu_icon_dimensions = (25, 25)
for r in resources_json:
if 'menuIcon' in r and r['menuIcon']:
res_file = (
resources_node.find_node(find_most_specific_filename(bld, bld.env,
resources_node,
str(r['file'])))).abspath()
if not validate_resource_not_larger_than(bld, res_file,
dimensions=max_menu_icon_dimensions):
bld.fatal("menuIcon resource '{}' exceeds the maximum allowed dimensions of {}".
format(r['name'], max_menu_icon_dimensions))
defs = resource_generator.definitions_from_dict(bld, r, resource_source_path)
for d in defs:
if not d.is_in_target_platform(bld):
continue
if d.type == 'png-trans':
# Hack for SDK compatibility:
# One entry in the media list with the type png-trans actually represents two
# resources, one for the black mask and one for the white mask. They each have
# their own resource ids, so we need two entries in our definitions list.
for suffix in ('WHITE', 'BLACK'):
new_definition = copy.deepcopy(d)
new_definition.name = '%s_%s' % (d.name, suffix)
resource_definitions.append(new_definition)
continue
if d.type == 'png' and is_sdk_2x(bld.env.SDK_VERSION_MAJOR, bld.env.SDK_VERSION_MINOR):
# PNG support is not available in the 2.x SDK, so process these into a pbi instead
d.type = 'pbi'
resource_definitions.append(d)
bld_dir = bld.path.get_bld().make_node(bld.env.BUILD_DIR)
lib_resources = []
for lib in bld.env.LIB_JSON:
# Skip resource handling if not a Pebble library or if no resources are specified
if 'pebble' not in lib or 'resources' not in lib['pebble']:
continue
if 'media' not in lib['pebble']['resources'] or not lib['pebble']['resources']['media']:
continue
lib_path = bld.path.find_node(lib['path'])
try:
resources_path = lib_path.find_node('resources').find_node(bld.env.PLATFORM_NAME)
except AttributeError:
bld.fatal("Library {} is missing resources".format(lib['name']))
else:
if resources_path is None:
bld.fatal("Library {} is missing resources for the {} platform".
format(lib['name'], bld.env.PLATFORM_NAME))
for lib_resource in bld.env.LIB_RESOURCES_JSON.get(lib['name'], []):
# Skip resources that specify targetPlatforms other than this one
if 'targetPlatforms' in lib_resource:
if bld.env.PLATFORM_NAME not in lib_resource['targetPlatforms']:
continue
reso_file = '{}.{}.reso'.format(lib_resource['file'], lib_resource['name'])
resource_node = resources_path.find_node(reso_file)
if resource_node is None:
bld.fatal("Library {} is missing the {} resource for the {} platform".
format(lib['name'], lib_resource['name'], bld.env.PLATFORM_NAME))
if lib_resource['name'] in resource_file_mapping:
bld.fatal("Duplicate resource IDs are not permitted. Package resource {} uses the "
"same resource ID as another resource already in this project.".
format(lib_resource['name']))
resource_file_mapping[lib_resource['name']] = resource_node
lib_resources.append(resource_node)
resources_list = []
if resource_definitions:
resources_list.extend(resource_definitions)
if lib_resources:
resources_list.extend(lib_resources)
build_type = getattr(bld.env, 'BUILD_TYPE', 'app')
resource_ball = bld_dir.make_node('system_resources.resball')
# If this is a library, generate a resource ball containing only resources provided in this
# project (not additional dependencies)
project_resource_ball = None
if build_type == 'lib':
project_resource_ball = bld_dir.make_node('project_resources.resball')
bld.env.PROJECT_RESBALL = project_resource_ball
if published_media_json:
# Only create TLUT for non-packages
if build_type != 'lib':
timeline_resource_table = bld_dir.make_node('timeline_resource_table.reso')
resources_list.append(timeline_resource_table)
_preprocess_resource_ids(bld, resources_list, True)
bld(features='process_timeline_resources',
published_media=published_media_json,
timeline_reso=timeline_resource_table,
layouts_json=bld_dir.make_node('layouts.json'),
resource_mapping=resource_file_mapping,
vars=['RESOURCE_ID_MAPPING', 'PUBLISHED_MEDIA_JSON'])
# Create resource objects from a set of resource definitions and package them in a resource ball
bld(features='generate_resource_ball',
resources=resources_list,
resource_ball=resource_ball,
project_resource_ball=project_resource_ball,
vars=['RESOURCES_JSON', 'LIB_RESOURCES_JSON', 'RESOURCE_ID_MAPPING'])
# Create a resource ID header for use during the linking step of the build
# FIXME PBL-36458: Since pebble.h requires this file through a #include, this file must be
# present for every project, regardless of whether or not resources exist for the project. At
# this time, this means the `generate_resource_id_header` task generator must run for every
# project. Since the input of the `generate_resource_id_header` task generator is the
# resource ball created by the `generate_resource_ball` task generator, the
# `generate_resource_ball` task generator must also run for every project.
resource_id_header = bld_dir.make_node('src/resource_ids.auto.h')
bld.env.RESOURCE_ID_HEADER = resource_id_header.abspath()
bld(features='generate_resource_id_header',
resource_ball=resource_ball,
resource_id_header_target=resource_id_header,
use_extern=build_type == 'lib',
use_define=build_type == 'app',
published_media=published_media_json)
resource_id_definitions = bld_dir.make_node('src/resource_ids.auto.c')
bld.env.RESOURCE_ID_DEFINITIONS = resource_id_definitions.abspath()
bld(features='generate_resource_id_definitions',
resource_ball=resource_ball,
resource_id_definitions_target=resource_id_definitions,
published_media=published_media_json)
if not bld.env.BUILD_TYPE or bld.env.BUILD_TYPE in ('app', 'rocky'):
# Create a resource pack for distribution with an application binary
pbpack = bld_dir.make_node('app_resources.pbpack')
bld(features='generate_pbpack',
resource_ball=resource_ball,
pbpack_target=pbpack,
is_system=False)
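# Worked example (sketch; names are hypothetical): a media entry of
#
#     {"type": "png-trans", "name": "MY_ICON", "file": "images/icon.png"}
#
# is expanded by the definitions loop above into MY_ICON_WHITE and
# MY_ICON_BLACK, each with its own resource ID; on a 2.x SDK a plain png
# entry is additionally rewritten to the pbi type before packing.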

232
sdk/waftools/process_timeline_resources.py Normal file
View file

@ -0,0 +1,232 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import struct
from waflib import Node, Task, TaskGen
from waflib.TaskGen import before_method, feature
from resources.types.resource_definition import ResourceDefinition
from resources.types.resource_object import ResourceObject
from sdk_helpers import validate_resource_not_larger_than
class layouts_json(Task.Task):
"""
Task class for generating a layouts JSON file with the timeline/glance resource id mapping for
publishedMedia items
"""
def run(self):
"""
This method executes when the layouts JSON task runs
:return: N/A
"""
published_media_dict = {m['id']: m['name'] for m in self.published_media}
timeline_entries = [{'id': media_id, 'name': media_name} for media_id, media_name in
published_media_dict.iteritems()]
image_uris = {
'resources': {'app://images/' + r['name']: r['id'] for r in timeline_entries}
}
# Write the resulting URI-to-ID mapping to a JSON file in the build directory
with open(self.outputs[0].abspath(), 'w') as f:
json.dump(image_uris, f, indent=8)
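# Output sketch (hypothetical publishedMedia item {'id': 1, 'name': 'WEATHER_ICON'}):
#
#     {
#         "resources": {
#             "app://images/WEATHER_ICON": 1
#         }
#     }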
def _collect_lib_published_media(ctx):
"""
Collects all lib-defined publishedMedia objects and provides a list for comparison with app
aliases
:param ctx: the current Context object
:return: a list of all defined publishedMedia items from included packages
"""
published_media = []
for lib in ctx.env.LIB_JSON:
if 'pebble' not in lib or 'resources' not in lib['pebble']:
continue
if 'publishedMedia' not in lib['pebble']['resources']:
continue
published_media.extend(lib['pebble']['resources']['publishedMedia'])
return published_media
class timeline_reso(Task.Task):
"""
Task class for generating a timeline lookup table for publishedMedia items, which is then
packed and packaged as a ResourceObject for later inclusion in a ResourceBall and PBPack
"""
def run(self):
"""
This method executes when the timeline reso task runs
:return: N/A
"""
bld = self.generator.bld
resource_id_mapping = self.env.RESOURCE_ID_MAPPING
TIMELINE_RESOURCE_TABLE_ENTRY_FMT = '<III'
TLUT_SIGNATURE = 'TLUT'
timeline_resources = []
published_media_from_libs = _collect_lib_published_media(self.generator)
# Create a sparse table to represent a c-style array
for item in self.published_media:
timeline_id = item.get('id', None)
published_media_name = item.get('name', None) # string representation of published_id
build_type = self.env.BUILD_TYPE
timeline_tiny_exists = 'timeline' in item and 'tiny' in item['timeline']
if 'glance' in item:
# Alias ['timeline']['tiny'] to ['glance'] if missing, or validate
# ['timeline']['tiny'] == ['glance'] if both exist
if not timeline_tiny_exists:
timeline = item.pop('timeline', {})
timeline.update({'tiny': item['glance']})
item['timeline'] = timeline
elif item['glance'] != item['timeline']['tiny']:
bld.fatal("Resource {} in publishedMedia specifies different values {} and {}"
"for ['glance'] and ['timeline']['tiny'] attributes, respectively. "
"Differing values for these fields are not supported.".
format(item['name'], item['glance'], item['timeline']['tiny']))
else:
if not timeline_tiny_exists:
if 'alias' in item and build_type != 'lib':
# Substitute package-defined publishedMedia item for objects with `alias`
# defined
for definition in published_media_from_libs:
if definition['name'] == item['alias']:
del item['alias']
del definition['name']
item.update(definition)
break
else:
bld.fatal("No resource for alias '{}' exists in installed packages".
format(item['alias']))
else:
bld.fatal("Resource {} in publishedMedia is missing values for ['glance'] "
"and ['timeline']['tiny'].".format(published_media_name))
# Extend table if needed
if timeline_id >= len(timeline_resources):
timeline_resources.extend({'tiny': 0, 'small': 0, 'large': 0} for x in
range(len(timeline_resources), timeline_id + 1))
# Set the resource IDs for this timeline item
for size, res_id in item['timeline'].iteritems():
if res_id not in resource_id_mapping:
bld.fatal("Invalid resource ID {} specified in publishedMedia".format(res_id))
timeline_resources[timeline_id][size] = resource_id_mapping[res_id]
# Serialize the table
table = TLUT_SIGNATURE
for r in timeline_resources:
table += struct.pack(TIMELINE_RESOURCE_TABLE_ENTRY_FMT,
r['tiny'],
r['small'],
r['large'])
r = ResourceObject(ResourceDefinition('raw', 'TIMELINE_LUT', ''), table)
r.dump(self.outputs[0])
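# Layout sketch of the serialized table: the 4-byte 'TLUT' signature followed
# by one 12-byte entry per publishedMedia id (per the '<III' format above),
# e.g. for a single item whose tiny resource maps to ID 2 and which has no
# small or large variants:
#
#     'TLUT' + struct.pack('<III', 2, 0, 0)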
def _get_resource_file(ctx, mapping, resource_id, resources_node=None):
try:
resource = mapping[resource_id]
except KeyError:
ctx.bld.fatal("No resource '{}' found for publishedMedia use.".format(resource_id))
if isinstance(resource, Node.Node):
return resource.abspath()
elif resources_node:
return resources_node.find_node(str(resource)).abspath()
else:
return ctx.path.find_node('resources').find_node(str(resource)).abspath()
@feature('process_timeline_resources')
@before_method('generate_resource_ball')
def process_timeline_resources(task_gen):
"""
Process all of the resources listed in the publishedMedia object in project JSON files.
As applicable, generate a layouts.json file for mobile apps to do resource id lookups,
and generate a timeline lookup table for FW to do resource id lookups.
Keyword arguments:
published_media -- A JSON object containing all of the resources defined as publishedMedia in a
project's JSON file
timeline_reso -- The name of the file to be used to store the timeline lookup table
layouts_json -- The name of the file to be used to store the JSON timeline/glance resource id
mapping
:param task_gen: the task generator instance
:return: N/A
"""
bld = task_gen.bld
build_type = task_gen.env.BUILD_TYPE
published_media = task_gen.published_media
timeline_resource_table = task_gen.timeline_reso
layouts_json = task_gen.layouts_json
mapping = task_gen.resource_mapping
MAX_SIZES = {
'glance': (25, 25),
'tiny': (25, 25),
'small': (50, 50),
'large': (80, 80)
}
used_ids = []
for item in published_media:
if 'id' not in item:
# Pebble Package builds omit the ID
if build_type == 'lib':
continue
else:
bld.fatal("Missing 'id' attribute for publishedMedia item '{}'".
format(item['name']))
# Check for duplicate IDs
if item['id'] in used_ids:
task_gen.bld.fatal("Cannot specify multiple resources with the same publishedMedia ID. "
"Please modify your publishedMedia items to only use the ID {} once".
format(item['id']))
else:
used_ids.append(item['id'])
# Check for valid resource dimensions
if 'glance' in item:
res_file = _get_resource_file(task_gen, mapping, item['glance'])
if not validate_resource_not_larger_than(task_gen.bld, res_file, MAX_SIZES['glance']):
bld.fatal("publishedMedia item '{}' specifies a resource '{}' for attribute "
"'glance' that exceeds the maximum allowed dimensions of {} x {} for "
"that attribute.".
format(item['name'], mapping[item['glance']], MAX_SIZES['glance'][0],
MAX_SIZES['glance'][1]))
if 'timeline' in item:
for size in ('tiny', 'small', 'large'):
if size in item['timeline']:
res_file = _get_resource_file(task_gen, mapping, item['timeline'][size])
if not validate_resource_not_larger_than(task_gen.bld, res_file,
MAX_SIZES[size]):
bld.fatal("publishedMedia item '{}' specifies a resource '{}' for size '{}'"
" that exceeds the maximum allowed dimensions of {} x {} for "
" that size.".
format(item['name'], mapping[item['timeline'][size]], size,
MAX_SIZES[size][0], MAX_SIZES[size][1]))
timeline_reso_task = task_gen.create_task('timeline_reso',
src=None, tgt=timeline_resource_table)
timeline_reso_task.published_media = published_media
layouts_json_task = task_gen.create_task('layouts_json', src=None, tgt=layouts_json)
layouts_json_task.published_media = published_media
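# Example publishedMedia item accepted by the checks above (sketch; names and
# IDs are hypothetical):
#
#     {
#         "id": 1,
#         "name": "WEATHER_ICON",
#         "glance": "TINY_ICON",
#         "timeline": {"tiny": "TINY_ICON", "small": "SMALL_ICON"}
#     }
#
# 'glance' and ['timeline']['tiny'] must agree, and every referenced resource
# is validated against the MAX_SIZES limits before the two tasks are created.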

113
sdk/waftools/report_memory_usage.py Normal file
View file

@ -0,0 +1,113 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from waflib import Logs, Task
from waflib.TaskGen import after_method, feature
from binutils import size
from memory_reports import (app_memory_report, app_resource_memory_error,
app_appstore_resource_memory_error,
bytecode_memory_report, simple_memory_report)
from sdk_helpers import is_sdk_2x
class memory_usage_report(Task.Task):
"""
Task class to print a memory usage report for the specified binary and resources, if any
"""
def run(self):
"""
This method executes when the memory usage report task runs
:return: None
"""
bin_type = self.bin_type
platform = self.generator.env.PLATFORM_NAME
if bin_type == 'rocky':
env = self.generator.bld.all_envs[self.env.PLATFORM_NAME]
Logs.pprint(*bytecode_memory_report(platform, env.SNAPSHOT_SIZE, env.SNAPSHOT_MAX))
return
bin_path = self.inputs[0].abspath()
resources_path = self.inputs[1].abspath() if len(self.inputs) > 1 else None
max_ram, max_resources, max_appstore_resources = self.max_sizes
# Handle zero-size binaries (more common with packages)
bin_size = size(bin_path)
ram_size = sum(bin_size) if bin_size != 0 else 0
resource_size = os.stat(resources_path).st_size if resources_path else None
if resource_size and max_resources and max_appstore_resources:
if resource_size > max_resources:
Logs.pprint(*app_resource_memory_error(platform, resource_size,
max_resources))
return -1
elif resource_size > max_appstore_resources:
Logs.pprint(*app_appstore_resource_memory_error(platform, resource_size,
max_appstore_resources))
if max_ram:
# resource_size and max_appstore_resources are optional
free_ram = max_ram - ram_size
Logs.pprint(*app_memory_report(platform, bin_type, ram_size, max_ram,
free_ram, resource_size, max_appstore_resources))
else:
# resource_size is optional
Logs.pprint(*simple_memory_report(platform, ram_size, resource_size))
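# Numeric sketch of the app path above (hypothetical sizes): with
# max_ram = 65536 and ram_size = 42000, free_ram = 23536 is reported.
# A resource pack larger than max_resources aborts the build with an error,
# while one that only exceeds max_appstore_resources prints a warning and
# the build continues.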
@feature('memory_usage')
@after_method('cprogram', 'cstlib', 'process_rocky_js')
def generate_memory_usage_report(task_gen):
"""
Generates and prints a report of the project's memory usage (binary + resources, if applicable).
Keyword arguments:
app -- The path to the app elf file, if this is an app being evaluated
worker -- The path to the worker elf file, if this is a worker being evaluated
lib -- The path to the library archive file, if this is a library being evaluated
resources -- The path to the resource pack or resource ball, if resources exist for this bin_type
:param task_gen: the task generator instance
:return: None
"""
app, worker, lib, resources = (getattr(task_gen, attr, None)
for attr in ('app', 'worker', 'lib', 'resources'))
max_resources = task_gen.env.PLATFORM["MAX_RESOURCES_SIZE"]
max_resources_appstore = task_gen.env.PLATFORM["MAX_RESOURCES_SIZE_APPSTORE"]
app_max_ram = task_gen.env.PLATFORM["MAX_APP_MEMORY_SIZE"] if app else None
worker_max_ram = task_gen.env.PLATFORM["MAX_WORKER_MEMORY_SIZE"] if worker else None
if app:
app_task = task_gen.create_task('memory_usage_report',
[task_gen.to_nodes(app)[0],
task_gen.to_nodes(resources)[0]])
app_task.max_sizes = (app_max_ram, max_resources, max_resources_appstore)
app_task.bin_type = 'app'
if worker:
worker_task = task_gen.create_task('memory_usage_report',
task_gen.to_nodes(worker)[0])
worker_task.max_sizes = (worker_max_ram, None, None)
worker_task.bin_type = 'worker'
if lib:
lib_task = task_gen.create_task('memory_usage_report',
[task_gen.to_nodes(lib)[0],
task_gen.to_nodes(resources)[0]])
lib_task.max_sizes = (None, None, None)
lib_task.bin_type = 'lib'
if getattr(task_gen, 'bin_type', None) == 'rocky':
rocky_task = task_gen.create_task('memory_usage_report', task_gen.env.JS_RESO)
rocky_task.bin_type = 'rocky'
rocky_task.vars = ['PLATFORM_NAME']

395
sdk/waftools/sdk_helpers.py Normal file
View file

@ -0,0 +1,395 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import struct
import re
from waflib import Logs
from pebble_package import LibraryPackage
from pebble_sdk_platform import pebble_platforms, maybe_import_internal
from pebble_sdk_version import set_env_sdk_version
from resources.types.resource_object import ResourceObject
def _get_pbi_size(data):
"""
This method takes resource data and determines the dimensions of the pbi
:param data: the data contained in the pbi, starting at the header
:return: tuple containing the width and height of the pbi
"""
# Read the first byte at header offset 0x08 for width
width = struct.unpack('<h', data[8:10])[0]
# Read the next 2 bytes after the width to get the height
height = struct.unpack('<h', data[10:12])[0]
return width, height
def _get_pdc_size(data):
"""
This method takes resource data and determines the dimensions of the PDC
:param data: the data contained in the PDC, starting at the header
:return: tuple containing the width and height of the PDC
"""
# Read the 2 bytes at header offset 0x06 for width
width = struct.unpack('<h', data[6:8])[0]
# Read the next 2 bytes after the width to get the height
height = struct.unpack('<h', data[8:10])[0]
return width, height
def _get_png_size(data):
"""
This method takes resource data and determines the dimensions of the PNG
:param data: the data contained in the PNG, starting at the IHDR
:return: tuple containing the width and height of the PNG
"""
# Assert that this is the IHDR header
assert data[:4] == 'IHDR'
# Read the first 4 bytes after IHDR for width
width = struct.unpack('>I', data[4:8])[0]
# Read the next 4 bytes after the width to get the height
height = struct.unpack('>I', data[8:12])[0]
return width, height
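# Offset sketch for the three readers above: callers pass _get_png_size the
# chunk data starting at the 'IHDR' tag (byte 12 of a standalone PNG), so
# width and height sit at offsets 4 and 8 of that slice; pbi and PDC headers
# are little-endian ('<h'), while PNG dimensions are big-endian ('>I').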
def _get_supported_platforms(ctx, has_rocky=False):
"""
This method returns all of the supported SDK platforms, based on the SDK components found on
the filesystem
:param ctx: the Context object
:return: a list of the platforms that are supported for the given SDK
"""
sdk_check_nodes = ['lib/libpebble.a',
'pebble_app.ld.template',
'tools',
'include',
'include/pebble.h']
supported_platforms = os.listdir(ctx.env.PEBBLE_SDK_ROOT)
invalid_platforms = []
for platform in supported_platforms:
pebble_sdk_platform = ctx.root.find_node(ctx.env.PEBBLE_SDK_ROOT).find_node(platform)
for node in sdk_check_nodes:
if pebble_sdk_platform.find_node(node) is None:
if ctx.root.find_node(ctx.env.PEBBLE_SDK_COMMON).find_node(node) is None:
invalid_platforms.append(platform)
break
for platform in invalid_platforms:
supported_platforms.remove(platform)
if has_rocky and 'aplite' in supported_platforms:
supported_platforms.remove('aplite')
ctx.env.SUPPORTED_PLATFORMS = supported_platforms
return supported_platforms
def append_to_attr(self, attr, new_values):
"""
This helper method appends `new_values` to `attr` on the object `self`
:param self: the object
:param attr: the attribute to modify
:param new_values: the value(s) to append to the attribute
:return: N/A
"""
values = self.to_list(getattr(self, attr, []))
if not isinstance(new_values, list):
new_values = [new_values]
values.extend(new_values)
setattr(self, attr, values)
def configure_libraries(ctx, libraries):
"""
Process each Pebble package listed in the project's `dependencies` object, including
transitive dependencies
:param ctx: the ConfigureContext
:param libraries: the `dependencies` object from the project's JSON file
:return: N/A
"""
dependencies = libraries.keys()
lib_json = []
lib_resources_json = {}
index = 0
while index < len(dependencies):
info, resources, additional_deps = process_package(ctx, dependencies[index])
lib_json.append(info)
lib_resources_json[dependencies[index]] = resources
dependencies.extend(additional_deps)
index += 1
# Store package.json info for each library and add resources to an environment variable for
# dependency-checking
ctx.env.LIB_JSON = lib_json
if lib_resources_json:
ctx.env.LIB_RESOURCES_JSON = lib_resources_json
def configure_platform(ctx, platform):
"""
Configure a build for the <platform> specified
:param ctx: the ConfigureContext
:param platform: the hardware platform this build is being targeted for
:return: N/A
"""
pebble_sdk_root = get_node_from_abspath(ctx, ctx.env.PEBBLE_SDK_ROOT)
ctx.env.PLATFORM = pebble_platforms[platform]
ctx.env.PEBBLE_SDK_PLATFORM = pebble_sdk_root.find_node(str(platform)).abspath()
ctx.env.PLATFORM_NAME = ctx.env.PLATFORM['NAME']
for attribute in ['DEFINES']: # Attributes with list values
ctx.env.append_unique(attribute, ctx.env.PLATFORM[attribute])
for attribute in ['BUILD_DIR', 'BUNDLE_BIN_DIR']: # Attributes with a single value
ctx.env[attribute] = ctx.env.PLATFORM[attribute]
ctx.env.append_value('INCLUDES', ctx.env.BUILD_DIR)
ctx.msg("Found Pebble SDK for {} in:".format(platform), ctx.env.PEBBLE_SDK_PLATFORM)
process_info = (
pebble_sdk_root.find_node(str(platform)).find_node('include/pebble_process_info.h'))
set_env_sdk_version(ctx, process_info)
if is_sdk_2x(ctx.env.SDK_VERSION_MAJOR, ctx.env.SDK_VERSION_MINOR):
ctx.env.append_value('DEFINES', "PBL_SDK_2")
else:
ctx.env.append_value('DEFINES', "PBL_SDK_3")
ctx.load('pebble_sdk_gcc')
def find_sdk_component(ctx, env, component):
"""
This method finds an SDK component, either in the platform SDK folder, or the 'common' folder
:param ctx: the Context object
:param env: the environment which contains platform SDK folder path for the current platform
:param component: the SDK component being sought
:return: the path to the SDK component being sought
"""
return (ctx.root.find_node(env.PEBBLE_SDK_PLATFORM).find_node(component) or
ctx.root.find_node(env.PEBBLE_SDK_COMMON).find_node(component))
def get_node_from_abspath(ctx, path):
return ctx.root.make_node(path)
def get_target_platforms(ctx):
"""
This method returns a list of target platforms for a build, by comparing the list of requested
platforms to the list of supported platforms, returning all of the supported platforms if no
specific platforms are requested
:param ctx: the Context object
:return: list of target platforms for the build
"""
supported_platforms = _get_supported_platforms(ctx, ctx.env.BUILD_TYPE == 'rocky')
if not ctx.env.REQUESTED_PLATFORMS:
target_platforms = supported_platforms
else:
target_platforms = list(set(supported_platforms) & set(ctx.env.REQUESTED_PLATFORMS))
if not target_platforms:
ctx.fatal("No valid targetPlatforms specified in appinfo.json. Valid options are {}"
.format(supported_platforms))
ctx.env.TARGET_PLATFORMS = sorted([p.encode('utf-8') for p in target_platforms], reverse=True)
return target_platforms
def is_sdk_2x(major, minor):
"""
This method checks if a <major>.<minor> API version is associated with a 2.x version of the SDK
:param major: the major API version to check
:param minor: the minor API version to check
:return: boolean representing whether a 2.x SDK is being used or not
"""
LAST_2X_MAJOR_VERSION = 5
LAST_2X_MINOR_VERSION = 19
return (major, minor) <= (LAST_2X_MAJOR_VERSION, LAST_2X_MINOR_VERSION)
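# For example, is_sdk_2x(5, 19) returns True (the last 2.x release), while
# is_sdk_2x(5, 20) returns False, since the tuple comparison orders
# (major, minor) pairs lexicographically.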
def process_package(ctx, package, root_lib_node=None):
"""
This method parses the package.json for a given package and returns relevant information
:param ctx: the Context object
:param package: the package to parse information for
:param root_lib_node: node containing the package to be processed, if not the standard LIB_DIR
:return:
- a dictionary containing the contents of package.json
- a dictionary containing the resources object for the package
- a list of dependencies for this package
"""
resources_json = {}
if not root_lib_node:
root_lib_node = ctx.path.find_node(ctx.env.LIB_DIR)
if root_lib_node is None:
ctx.fatal("Missing {} directory".format(ctx.env.LIB_DIR))
lib_node = root_lib_node.find_node(str(package))
if lib_node is None:
ctx.fatal("Missing library for {} in {}".format(str(package), ctx.env.LIB_DIR))
else:
libinfo_node = lib_node.find_node('package.json')
if libinfo_node is None:
ctx.fatal("Missing package.json for {} library".format(str(package)))
else:
if lib_node.find_node(ctx.env.LIB_DIR):
error_str = ("ERROR: Multiple versions of the same package are not supported by "
"the Pebble SDK due to namespace issues during linking. Package '{}' "
"contains the following duplicate and incompatible dependencies, "
"which may lead to additional build errors and/or unpredictable "
"runtime behavior:\n".format(package))
packages_str = ""
for package in lib_node.find_node(ctx.env.LIB_DIR).ant_glob('**/package.json'):
with open(package.abspath()) as f:
info = json.load(f)
if not dict(ctx.env.PROJECT_INFO).get('enableMultiJS', False):
if not 'pebble' in info:
continue
packages_str += " '{}': '{}'\n".format(info['name'], info['version'])
if packages_str:
Logs.pprint("RED", error_str + packages_str)
with open(libinfo_node.abspath()) as f:
libinfo = json.load(f)
if 'pebble' in libinfo:
if ctx.env.BUILD_TYPE == 'rocky':
ctx.fatal("Packages containing C binaries are not compatible with Rocky.js "
"projects. Please remove '{}' from the `dependencies` object in "
"package.json".format(libinfo['name']))
libinfo['path'] = lib_node.make_node('dist').path_from(ctx.path)
if 'resources' in libinfo['pebble']:
if 'media' in libinfo['pebble']['resources']:
resources_json = libinfo['pebble']['resources']['media']
# Extract package into "dist" folder
dist_node = lib_node.find_node('dist.zip')
if not dist_node:
ctx.fatal("Missing dist.zip file for {}. Are you sure this is a Pebble "
"library?".format(package))
lib_package = LibraryPackage(dist_node.abspath())
lib_package.unpack(libinfo['path'])
lib_js_node = lib_node.find_node('dist/js')
if lib_js_node:
libinfo['js_paths'] = [lib_js.path_from(ctx.path) for lib_js in
lib_js_node.ant_glob(['**/*.js', '**/*.json'])]
else:
libinfo['js_paths'] = [lib_js.path_from(ctx.path) for lib_js in
lib_node.ant_glob(['**/*.js', '**/*.json'],
excl="**/*.min.js")]
dependencies = libinfo['dependencies'].keys() if 'dependencies' in libinfo else []
return libinfo, resources_json, dependencies
def truncate_to_32_bytes(name):
"""
This method takes an input string and returns a 32-byte truncated string if the input string is
longer than 32 bytes
:param name: the string to truncate
:return: the truncated string, if the input string was > 32 bytes, or else the original input
string
"""
return name[:30] + '..' if len(name) > 32 else name
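# For example, truncate_to_32_bytes('A' * 40) returns 'A' * 30 + '..', which
# is exactly 32 bytes; inputs of 32 bytes or fewer are returned unchanged.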
def validate_message_keys_object(ctx, project_info, info_json_type):
"""
Verify that the appropriately-named message key object is present in the project info file
:param ctx: the ConfigureContext object
:param project_info: JSON object containing project info
:param info_json_type: string containing the name of the file used to extract project info
:return: N/A
"""
if 'appKeys' in project_info and info_json_type == 'package.json':
ctx.fatal("Project contains an invalid object `appKeys` in package.json. Please use "
"`messageKeys` instead.")
if 'messageKeys' in project_info and info_json_type == 'appinfo.json':
ctx.fatal("Project contains an invalid object `messageKeys` in appinfo.json. Please use "
"`appKeys` instead.")
def validate_resource_not_larger_than(ctx, resource_file, dimensions=None, width=None, height=None):
"""
This method takes a resource file and determines whether the file's dimensions fit within the
maximum allowed values provided.
:param ctx: the Context object
:param resource_file: the path to the resource file
:param dimensions: tuple specifying max width and height
:param width: number specifying max width
:param height: number specifying max height
:return: True if the resource is no larger than the maximum allowed dimensions, False otherwise
"""
if not dimensions and not width and not height:
raise TypeError("Missing values for maximum width and/or height to validate against")
if dimensions:
width, height = dimensions
with open(resource_file, 'rb') as f:
if resource_file.endswith('.reso'):
reso = ResourceObject.load(resource_file)
if reso.definition.type == 'bitmap':
storage_format = reso.definition.storage_format
else:
storage_format = reso.definition.type
if storage_format == 'pbi':
resource_size = _get_pbi_size(reso.data)
elif storage_format == 'png':
resource_size = _get_png_size(reso.data[12:])
elif storage_format == 'raw':
try:
assert reso.data[:4] == 'PDCI'
except AssertionError:
ctx.fatal("Unsupported published resource type for {}".format(resource_file))
else:
resource_size = _get_pdc_size(reso.data[4:])
else:
data = f.read(24)
if data[1:4] == 'PNG':
resource_size = _get_png_size(data[12:])
elif data[:4] == 'PDCI':
resource_size = _get_pdc_size(data[4:])
else:
ctx.fatal("Unsupported published resource type for {}".format(resource_file))
if width and height:
return resource_size <= (width, height)
elif width:
return resource_size[0] <= width
elif height:
return resource_size[1] <= height
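# Usage sketch (hypothetical path), mirroring the menuIcon check in
# process_sdk_resources.py:
#
#     validate_resource_not_larger_than(bld, 'resources/images/icon.png',
#                                       dimensions=(25, 25))
#
# which returns True only when both the decoded width and height fit within
# the 25 x 25 limit.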
def wrap_task_name_with_platform(self):
"""
This method replaces the existing waf Task class's __str__ method with the original content
of the __str__ method, as well as an additional "<platform> | " before the task information,
if a platform is set.
:param self: the task instance
:return: the user-friendly string to print
"""
src_str = ' '.join([a.nice_path() for a in self.inputs])
tgt_str = ' '.join([a.nice_path() for a in self.outputs])
sep = ' -> ' if self.outputs else ''
name = self.__class__.__name__.replace('_task', '')
# Modification to the original __str__ method
if self.env.PLATFORM_NAME:
name = self.env.PLATFORM_NAME + " | " + name
return '%s: %s%s%s\n' % (name, src_str, sep, tgt_str)

27
sdk/waftools/sdk_paths.py Normal file
View file

@ -0,0 +1,27 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This script is used to import any paths required by the SDK file structure for building Pebble
projects. Even though this script is not specifically a waftool, we benefit from bundling it
together with the other waftools because it automatically gets included in the search path used for
imports by other waftools.
"""
import os
import sys
sdk_root_dir = os.path.dirname(sys.path[0])
sys.path.append(os.path.join(sdk_root_dir, 'common/waftools'))
sys.path.append(os.path.join(sdk_root_dir, 'common/tools'))