Import of the watch repository from Pebble

Matthieu Jeanson 2024-12-12 16:43:03 -08:00 committed by Katharine Berry
commit 3b92768480
10334 changed files with 2564465 additions and 0 deletions

2
sdk/.gitignore vendored Normal file
@@ -0,0 +1,2 @@
# the waf script in the waf folder is dynamically built
waf/waf

1787
sdk/Doxyfile-SDK.template Normal file

File diff suppressed because it is too large

21
sdk/defaults/app/index.js Normal file
@@ -0,0 +1,21 @@
/**
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
Pebble.addEventListener("ready",
  function(e) {
    console.log("Hello world! - Sent from your javascript application.");
  }
);

76
sdk/defaults/app/main.c Normal file
@@ -0,0 +1,76 @@
/*
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <pebble.h>

static Window *s_window;
static TextLayer *s_text_layer;

static void prv_select_click_handler(ClickRecognizerRef recognizer, void *context) {
  text_layer_set_text(s_text_layer, "Select");
}

static void prv_up_click_handler(ClickRecognizerRef recognizer, void *context) {
  text_layer_set_text(s_text_layer, "Up");
}

static void prv_down_click_handler(ClickRecognizerRef recognizer, void *context) {
  text_layer_set_text(s_text_layer, "Down");
}

static void prv_click_config_provider(void *context) {
  window_single_click_subscribe(BUTTON_ID_SELECT, prv_select_click_handler);
  window_single_click_subscribe(BUTTON_ID_UP, prv_up_click_handler);
  window_single_click_subscribe(BUTTON_ID_DOWN, prv_down_click_handler);
}

static void prv_window_load(Window *window) {
  Layer *window_layer = window_get_root_layer(window);
  GRect bounds = layer_get_bounds(window_layer);

  s_text_layer = text_layer_create(GRect(0, 72, bounds.size.w, 20));
  text_layer_set_text(s_text_layer, "Press a button");
  text_layer_set_text_alignment(s_text_layer, GTextAlignmentCenter);
  layer_add_child(window_layer, text_layer_get_layer(s_text_layer));
}

static void prv_window_unload(Window *window) {
  text_layer_destroy(s_text_layer);
}

static void prv_init(void) {
  s_window = window_create();
  window_set_click_config_provider(s_window, prv_click_config_provider);
  window_set_window_handlers(s_window, (WindowHandlers) {
    .load = prv_window_load,
    .unload = prv_window_unload,
  });
  const bool animated = true;
  window_stack_push(s_window, animated);
}

static void prv_deinit(void) {
  window_destroy(s_window);
}

int main(void) {
  prv_init();

  APP_LOG(APP_LOG_LEVEL_DEBUG, "Done initializing, pushed window: %p", s_window);

  app_event_loop();
  prv_deinit();
}

@@ -0,0 +1,29 @@
{
  "name": "${project_name}",
  "author": "MakeAwesomeHappen",
  "version": "1.0.0",
  "keywords": ["pebble-app"],
  "private": true,
  "dependencies": {},
  "pebble": {
    "displayName": "${display_name}",
    "uuid": "${uuid}",
    "sdkVersion": "${sdk_version}",
    "enableMultiJS": true,
    "targetPlatforms": [
      "aplite",
      "basalt",
      "chalk",
      "diorite"
    ],
    "watchapp": {
      "watchface": false
    },
    "messageKeys": [
      "dummy"
    ],
    "resources": {
      "media": []
    }
  }
}

21
sdk/defaults/app/simple.c Normal file
@@ -0,0 +1,21 @@
/*
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <pebble.h>

int main(void) {
  app_event_loop();
}

21
sdk/defaults/app/worker.c Normal file
@@ -0,0 +1,21 @@
/*
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <pebble_worker.h>

int main(void) {
  worker_event_loop();
}

54
sdk/defaults/app/wscript Normal file
@@ -0,0 +1,54 @@
#
# This file is the default set of rules to compile a Pebble application.
#
# Feel free to customize this to your needs.
#
import os.path
top = '.'
out = 'build'
def options(ctx):
    ctx.load('pebble_sdk')


def configure(ctx):
    """
    This method is used to configure your build. ctx.load(`pebble_sdk`) automatically configures
    a build for each valid platform in `targetPlatforms`. Platform-specific configuration: add your
    change after calling ctx.load('pebble_sdk') and make sure to set the correct environment first.
    Universal configuration: add your change prior to calling ctx.load('pebble_sdk').
    """
    ctx.load('pebble_sdk')


def build(ctx):
    ctx.load('pebble_sdk')

    build_worker = os.path.exists('worker_src')
    binaries = []

    cached_env = ctx.env
    for platform in ctx.env.TARGET_PLATFORMS:
        ctx.env = ctx.all_envs[platform]
        ctx.set_group(ctx.env.PLATFORM_NAME)

        app_elf = '{}/pebble-app.elf'.format(ctx.env.BUILD_DIR)
        ctx.pbl_build(source=ctx.path.ant_glob('src/c/**/*.c'), target=app_elf, bin_type='app')

        if build_worker:
            worker_elf = '{}/pebble-worker.elf'.format(ctx.env.BUILD_DIR)
            binaries.append({'platform': platform, 'app_elf': app_elf, 'worker_elf': worker_elf})
            ctx.pbl_build(source=ctx.path.ant_glob('worker_src/c/**/*.c'),
                          target=worker_elf,
                          bin_type='worker')
        else:
            binaries.append({'platform': platform, 'app_elf': app_elf})
    ctx.env = cached_env

    ctx.set_group('bundle')
    ctx.pbl_bundle(binaries=binaries,
                   js=ctx.path.ant_glob(['src/pkjs/**/*.js',
                                         'src/pkjs/**/*.json',
                                         'src/common/**/*.js']),
                   js_entry_file='src/pkjs/index.js')

@@ -0,0 +1,10 @@
# Ignore build generated files
build/
dist/
dist.zip
# Ignore waf lock file
.lock-waf*
# Ignore installed node modules
node_modules/

22
sdk/defaults/lib/lib.c Normal file
@@ -0,0 +1,22 @@
/*
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <pebble.h>

#include "${project_name}.h"

bool ${project_name_c}_find_truth(void) {
  return true;
}

19
sdk/defaults/lib/lib.h Normal file
@@ -0,0 +1,19 @@
/*
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
bool ${project_name_c}_find_truth(void);

21
sdk/defaults/lib/lib.js Normal file
@@ -0,0 +1,21 @@
/**
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var run_me = function(e) {
  console.log("Look at me, I'm running!");
};

module.exports = run_me;

@@ -0,0 +1,21 @@
{
  "name": "${project_name}",
  "author": "MakeAwesomeHappen",
  "version": "1.0.0",
  "files": ["dist.zip"],
  "keywords": ["pebble-package"],
  "dependencies": {},
  "pebble": {
    "projectType": "package",
    "sdkVersion": "${sdk_version}",
    "targetPlatforms": [
      "aplite",
      "basalt",
      "chalk",
      "diorite"
    ],
    "resources": {
      "media": []
    }
  }
}

48
sdk/defaults/lib/wscript Normal file
@@ -0,0 +1,48 @@
#
# This file is the default set of rules to compile a Pebble project.
#
# Feel free to customize this to your needs.
#
import os
import shutil
import waflib
top = '.'
out = 'build'
def distclean(ctx):
    if os.path.exists('dist.zip'):
        os.remove('dist.zip')
    if os.path.exists('dist'):
        shutil.rmtree('dist')
    waflib.Scripting.distclean(ctx)


def options(ctx):
    ctx.load('pebble_sdk_lib')


def configure(ctx):
    ctx.load('pebble_sdk_lib')


def build(ctx):
    ctx.load('pebble_sdk_lib')

    cached_env = ctx.env
    for platform in ctx.env.TARGET_PLATFORMS:
        ctx.env = ctx.all_envs[platform]
        ctx.set_group(ctx.env.PLATFORM_NAME)
        lib_name = '{}/{}'.format(ctx.env.BUILD_DIR, ctx.env.PROJECT_INFO['name'])
        ctx.pbl_build(source=ctx.path.ant_glob('src/c/**/*.c'), target=lib_name, bin_type='lib')
    ctx.env = cached_env

    ctx.set_group('bundle')
    ctx.pbl_bundle(includes=ctx.path.ant_glob('include/**/*.h'),
                   js=ctx.path.ant_glob(['src/js/**/*.js', 'src/js/**/*.json']),
                   bin_type='lib')

    if ctx.cmd == 'clean':
        for n in ctx.path.ant_glob(['dist/**/*', 'dist.zip'], quiet=True):
            n.delete()

20
sdk/defaults/rocky/app.js Normal file
@@ -0,0 +1,20 @@
/**
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// https://developer.pebble.com/docs/pebblekit-js/Pebble/#on
Pebble.on('message', function(event) {
  console.log('Message received from watch:', event.data);
});

@@ -0,0 +1,84 @@
/**
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var rocky = require('rocky');

// An object to cache our date & time values,
// to minimize computations in the draw handler.
var clockData = {
  time: '',
  date: ''
};

// Every minute
// https://developer.pebble.com/docs/rockyjs/rocky/#on
rocky.on('minutechange', function(event) {
  // Current date/time
  // https://developer.pebble.com/docs/rockyjs/Date/
  var d = event.date;

  // Get current time, based on 12h or 24h format (01:00 or 1:00 AM)
  clockData.time = d.toLocaleTimeString().replace(/:\d+($| )/, '$1');

  // Day of month
  var day = d.toLocaleDateString(undefined, ({day: 'numeric'}));

  // Month name
  var month = d.toLocaleDateString(undefined, ({month: 'long'}));

  // Date
  clockData.date = (day + ' ' + month);

  // Force screen redraw
  rocky.requestDraw();
});

// Redraw the screen
rocky.on('draw', function(event) {
  // Drawing canvas
  var ctx = event.context;

  // Clear the canvas
  // https://developer.pebble.com/docs/rockyjs/CanvasRenderingContext2D/#Canvas
  ctx.clearRect(0, 0, ctx.canvas.clientWidth, ctx.canvas.clientHeight);

  // UnobstructedArea
  // https://developer.pebble.com/docs/rockyjs/CanvasRenderingContext2D/#Canvas
  var offsetY = (ctx.canvas.clientHeight - ctx.canvas.unobstructedHeight) / 2;
  var centerX = ctx.canvas.unobstructedWidth / 2;

  // Text formatting
  ctx.fillStyle = 'white';
  ctx.textAlign = 'center';

  // Time font
  // https://developer.pebble.com/docs/rockyjs/CanvasRenderingContext2D/#font
  ctx.font = '26px bold Leco-numbers-am-pm';

  // Time
  ctx.fillText(clockData.time, centerX, (66 - offsetY));

  // Date font
  ctx.font = '18px bold Gothic';

  // Date
  ctx.fillText(clockData.date, centerX, (94 - offsetY));
});

// Send a single message to the Phone
// https://developer.pebble.com/docs/rockyjs/rocky/#postMessage
rocky.postMessage("This arrives on the phone via bluetooth!");

@@ -0,0 +1,25 @@
{
  "name": "${project_name}",
  "author": "MakeAwesomeHappen",
  "version": "1.0.0",
  "keywords": ["pebble-app"],
  "private": true,
  "dependencies": {},
  "pebble": {
    "main": {
      "rockyjs": "src/rocky/index.js",
      "pkjs": "src/pkjs/index.js"
    },
    "displayName": "${display_name}",
    "uuid": "${uuid}",
    "projectType": "rocky",
    "sdkVersion": "${sdk_version}",
    "enableMultiJS": true,
    "watchapp": {
      "watchface": true
    },
    "resources": {
      "media": []
    }
  }
}

@@ -0,0 +1,30 @@
#
# This file is the default set of rules to compile a Pebble application.
#
# Feel free to customize this to your needs.
#
top = '.'
out = 'build'
def options(ctx):
    ctx.load('pebble_sdk')


def configure(ctx):
    """
    This method is used to configure your build. ctx.load(`pebble_sdk`) automatically configures
    a build for each valid platform in `targetPlatforms`. Platform-specific configuration: add your
    change after calling ctx.load('pebble_sdk') and make sure to set the correct environment first.
    Universal configuration: add your change prior to calling ctx.load('pebble_sdk').
    """
    ctx.load('pebble_sdk')


def build(ctx):
    ctx.load('pebble_sdk')

    ctx.pbl_bundle(js=ctx.path.ant_glob(['src/pkjs/**/*.js',
                                         'src/pkjs/**/*.json',
                                         'src/common/**/*.js']),
                   js_entry_file='src/pkjs/index.js',
                   bin_type='rocky')

@@ -0,0 +1,42 @@
{
  "default": {
    ".gitignore": "gitignore"
  },
  "rocky": {
    "default": {
      "src/pkjs/index.js": "rocky/app.js",
      "src/rocky/index.js": "rocky/index.js",
      "wscript": "rocky/wscript",
      "package.json": "rocky/package.json"
    }
  },
  "app": {
    "default": {
      "src/c/${project_name}.c": "app/main.c",
      "wscript": "app/wscript",
      "package.json": "app/package.json",
      "resources": null
    },
    "worker": {
      "worker_src/c/${project_name}_worker.c": "app/worker.c"
    },
    "simple": {
      "src/c/${project_name}.c": "app/simple.c"
    },
    "javascript": {
      "src/pkjs/index.js": "app/index.js"
    }
  },
  "lib": {
    "default": {
      "src/c/${project_name}.c": "lib/lib.c",
      "include/${project_name}.h": "lib/lib.h",
      "wscript": "lib/wscript",
      "package.json": "lib/package.json",
      "src/resources": null
    },
    "javascript": {
      "src/js/index.js": "lib/lib.js"
    }
  }
}

0
sdk/include/.gitignore vendored Normal file

@@ -0,0 +1,22 @@
/**
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
module.exports = function(module) {
  switch(module) {
    case "message_keys": return require("message_keys");
  }
  throw new Error('Module not found: ' + module);
};
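For context, a minimal, hypothetical sketch (not part of this commit) of how app-side pkjs code consumes the `message_keys` module this loader resolves; the `dummy` key name is taken from the `messageKeys` list in the app template's package.json above:

```
// Hypothetical pkjs usage; 'message_keys' maps the names declared under
// "messageKeys" in package.json to their numeric AppMessage keys.
var keys = require('message_keys');

var payload = {};
payload[keys.dummy] = 42; // value sent under the key generated for "dummy"
Pebble.sendAppMessage(payload, function() {
  console.log('Message sent');
}, function(e) {
  console.log('Message failed');
});
```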

@@ -0,0 +1,705 @@
/**
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function() {
var utf8 = require('utf8');
var POSTMESSAGE_DEBUG = false;
// Super simple polyfill for Array.from() that only deals with a Uint8Array:
var arrayFromUint8Array = Array.from ? Array.from : function(uint8Array) {
return [].slice.call(uint8Array);
};
function debugLog() {
if (POSTMESSAGE_DEBUG) {
console.log.apply(console, arguments);
}
}
function createHandlersList() {
var pos = 0;
var handlers = [];
return {
add : function(handler) {
handlers.push(handler);
},
clear : function() {
handlers = [];
pos = 0;
},
isEmpty : function() {
return (handlers.length == 0);
},
remove : function(handler) {
var idx = handlers.indexOf(handler);
if (idx < 0) { return; } // Not registered
if (idx < pos) { pos = Math.max(pos - 1, 0); } // We've iterated past it, and it's been removed
handlers.splice(idx, 1);
},
newIterator : function() {
pos = 0; // new iterator, reset position
return {
next : function() {
if (pos < handlers.length) {
return handlers[pos++];
} else {
return undefined;
}
}
}
}
}
}
var EVENTS = {};
var _callHandler = function(handler, event_name, callback_data) {
var msg = { type: event_name };
if (callback_data !== undefined) {
msg.data = callback_data;
}
handler(msg);
};
var _callHandlersForEvent = function(event_name, callback_data) {
var handler;
if (!(event_name in EVENTS)) {
return;
}
var it = EVENTS[event_name].newIterator();
while ((handler = it.next())) {
_callHandler(handler, event_name, callback_data);
}
}
var _isPostMessageEvent = function(event_name) {
return (['message', 'postmessageerror',
'postmessageconnected', 'postmessagedisconnected'].indexOf(event_name)) > -1;
}
var __Pebble = Pebble;
// Create a new object with its prototype pointing to the original, using
// Object.create(). This way, we can rely on JavaScript's prototype chain
// traversal to make all properties on the original object "just work".
// Note however, that these won't be "own properties", so when using
// `for .. in`, Pebble.keys(), Object.getOwnPropertyNames(), etc. these
// "delegated properties" will not be found.
Pebble = Object.create(Pebble);
for (var attr in __Pebble) {
if (!__Pebble.hasOwnProperty(attr)) {
continue;
}
// Attributes of Pebble which can be bound, should be bound to the original object
if (__Pebble[attr].bind) {
Pebble[attr] = __Pebble[attr].bind(__Pebble);
} else {
Pebble[attr] = __Pebble[attr];
}
}
// Ensure that all exported functions exist.
["addEventListener", "removeEventListener", "showSimpleNotificationOnPebble",
"sendAppMessage", "getTimelineToken", "timelineSubscribe",
"timelineUnsubscribe", "timelineSubscriptions", "getActiveWatchInfo",
"getAccountToken", "getWatchToken", "appGlanceReload"].forEach(
function(elem, idx, arr) {
if ((elem in Pebble) || ((typeof __Pebble[elem]) !== 'function')) {
// This function has already been copied over or doesn't actually exist.
return;
}
Pebble[elem] = __Pebble[elem].bind(__Pebble);
}
);
// sendAppMessage is not supported, make it undefined so a user will get a
// "not a function" error, and can check `typeof Pebble.sendAppMessage === 'function'`
// to test for support.
Pebble["sendAppMessage"] = undefined;
// The rocky implementation!
function _scheduleAsyncPostMessageError(jsonString, reason) {
_callHandlersForEvent('postmessageerror', JSON.parse(jsonString));
console.error("postMessage() failed. Reason: " + reason);
}
Pebble.postMessage = function(obj) {
_out.sendObject(obj);
};
var on = function(event_name, handler) {
if (typeof(handler) !== 'function') {
throw TypeError("Handler for event expected, received " + typeof(handler));
}
if (!(event_name in EVENTS)) {
EVENTS[event_name] = createHandlersList();
}
EVENTS[event_name].add(handler);
if ((event_name == "postmessageconnected" && _control.state == ControlStateSessionOpen) ||
(event_name == "postmessagedisconnected" && _control.state != ControlStateSessionOpen)) {
_callHandler(handler, event_name);
}
};
Pebble.addEventListener = function(event_name, handler) {
if (_isPostMessageEvent(event_name)) {
return on(event_name, handler);
} else if (event_name == 'appmessage') {
throw Error("App Message not supported with Rocky.js apps. See Pebble.postMessage()");
} else {
return __Pebble.addEventListener(event_name, handler);
}
};
// Alias to the overridden implementation:
Pebble.on = Pebble.addEventListener;
var off = function(event_name, handler) {
if (handler === undefined) {
throw TypeError('Not enough arguments (missing handler)');
}
if (event_name in EVENTS) {
EVENTS[event_name].remove(handler);
}
}
Pebble.removeEventListener = function(event_name, handler) {
if (_isPostMessageEvent(event_name)) {
off(event_name, handler);
} else {
return __Pebble.removeEventListener(event_name, handler);
}
}
// Alias to the overridden implementation:
Pebble.off = Pebble.removeEventListener;
/*********************************************************************************
* postMessage(): Outbound object and control message queuing, sending & chunking.
********************************************************************************/
var _out = new Sender();
function Sender() {
this.controlQueue = [];
this.objectQueue = [];
this._currentMessageType = undefined;
this._failureCount = 0;
this._offsetBytes = 0;
this._chunkPayloadSize = 0;
this._resetCurrent = function() {
this._currentMessageType = undefined;
this._failureCount = 0;
this._offsetBytes = 0;
this._chunkPayloadSize = 0;
};
this._getNextMessageType = function() {
if (this.controlQueue.length > 0) {
return "control";
} else if (this.objectQueue.length > 0) {
return "object";
}
// No messages remaining
return undefined;
};
// Begin sending the next prioritized message
this._sendNext = function() {
if (this._currentMessageType !== undefined) {
return; // Already something in flight
}
var type = this._getNextMessageType();
if (type === undefined) {
return; // No message to send
}
if (type === "control") {
this._currentMessageType = type;
this._trySendNextControl();
} else if (type === "object") {
this._currentMessageType = type;
this._trySendNextChunk();
}
};
//////////////////////////////////////////////////////////////////////////////
// Sender: Control Message Handling
//////////////////////////////////////////////////////////////////////////////
this._controlSuccess = function() {
this.controlQueue.shift();
this._resetCurrent();
this._sendNext();
};
this._controlFailure = function(e) {
this._failureCount++;
var willRetry = (this._failureCount <= 3);
if (willRetry) {
setTimeout(this._trySendNextControl.bind(this), 1000); // 1s retry
} else {
debugLog("Failed to send control message: " + e +
", entering disconnected state.");
this.controlQueue.shift();
this._resetCurrent();
_control.enter(ControlStateDisconnected);
this._sendNext();
}
};
this._trySendNextControl = function() {
var msg = this.controlQueue[0];
__Pebble.sendAppMessage(msg,
this._controlSuccess.bind(this),
this._controlFailure.bind(this));
};
//////////////////////////////////////////////////////////////////////////////
// Sender: Object Message Handling
//////////////////////////////////////////////////////////////////////////////
this._createDataObject = function(obj) {
// Store obj as UTF-8 encoded JSON string into .data:
var native_str_msg;
try {
native_str_msg = JSON.stringify(obj);
} catch(e) {
throw Error("First argument must be JSON-serializable.");
}
// ECMA v5.1, 15.12.3, Note 5: Values that do not have a JSON
// representation (such as undefined and functions) do not produce a
// String. Instead they produce the undefined value.
if (native_str_msg === undefined) {
throw TypeError(
"Argument at index 0 is not a JSON.stringify()-able object");
}
var utf8_str_msg = utf8.encode(native_str_msg);
var data = [];
for (var i = 0; i < utf8_str_msg.length; i++) {
data.push(utf8_str_msg.charCodeAt(i));
}
data.push(0); // zero-terminate
return {
obj: obj,
data: data,
json: native_str_msg,
};
};
this._completeObject = function(failureReasonOrUndefined) {
var completeObject = this.objectQueue.shift();
this._resetCurrent();
if (failureReasonOrUndefined === undefined) {
debugLog("Complete!");
} else {
_scheduleAsyncPostMessageError(completeObject.json, failureReasonOrUndefined);
}
};
this._chunkSuccess = function(e) {
var data = this.objectQueue[0].data;
debugLog("Sent " + this._chunkPayloadSize + " of " + data.length + " bytes");
this._offsetBytes += this._chunkPayloadSize;
if (this._offsetBytes === data.length) {
this._completeObject();
this._sendNext();
} else {
this._trySendNextChunk();
}
};
this._chunkFailure = function(e) {
this._failureCount++;
var willRetry = (this._failureCount <= 3);
console.error("Chunk failed to send (willRetry=" + willRetry + "): " + e);
if (willRetry) {
setTimeout(this._trySendNextChunk.bind(this), 1000); // 1s retry
} else {
this._completeObject("Too many failed transfer attempts");
this._sendNext();
}
};
this._trySendNextChunk = function() {
if (this._getNextMessageType() !== "object") {
// This is no longer our highest priority outgoing message.
// Send that message instead, and this message will be left in the queue
// and restarted when appropriate.
this._resetCurrent();
this._sendNext();
return;
}
if (!_control.isSessionOpen()) {
// Make sure to start over if session is closed while chunks have been
// sent for the head object:
this._offsetBytes = 0;
this._chunkFailure("Session not open. Hint: check out the \"postmessageconnected\" event.");
return;
}
var data = this.objectQueue[0].data;
var sizeRemaining = data.length - this._offsetBytes;
debugLog("Sending next chunk, sizeRemaining: " + sizeRemaining);
this._chunkPayloadSize =
Math.min(_control.protocol.tx_chunk_size, sizeRemaining);
var n;
var isFirst = (this._offsetBytes === 0);
var isFirstBit;
if (isFirst) {
isFirstBit = (1 << 7);
n = data.length;
} else {
isFirstBit = 0;
n = this._offsetBytes;
}
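// Build the 4-byte chunk header (little-endian): the low 31 bits carry the
// total message length for the first chunk, or the byte offset for every
// following chunk; the top bit of the fourth byte flags the first chunk.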
var chunk = [
n & 255,
(n >> 8) & 255,
(n >> 16) & 255,
((n >> 24) & ~(1 << 7)) | isFirstBit
];
var chunkPayload = data.slice(
this._offsetBytes, this._offsetBytes + this._chunkPayloadSize);
Array.prototype.push.apply(chunk, chunkPayload);
debugLog("Sending Chunk Size: " + this._chunkPayloadSize);
__Pebble.sendAppMessage({ControlKeyChunk: chunk},
this._chunkSuccess.bind(this),
this._chunkFailure.bind(this));
};
//////////////////////////////////////////////////////////////////////////////
// Sender: Public Interface
//////////////////////////////////////////////////////////////////////////////
this.sendObject = function(obj) {
debugLog("Queuing up object message: " + JSON.stringify(obj));
var dataObj = this._createDataObject(obj);
this.objectQueue.push(dataObj)
this._sendNext();
};
this.sendControl = function(obj) {
debugLog("Sending control message: " + JSON.stringify(obj));
this.controlQueue.push(obj);
this._sendNext();
}
};
/*****************************************************************************
* postMessage(): Receiving chunks of inbound objects and reassembly
****************************************************************************/
var _in = new ChunkReceiver();
function ChunkReceiver() {
this.utf8_json_string = "";
this.total_size_bytes = 0;
this.received_size_bytes = 0;
this.handleChunkReceived = function handleChunkReceived(chunk) {
if (!chunk) {
return false;
}
var isExpectingFirst = (this.utf8_json_string.length === 0);
if (chunk.is_first != isExpectingFirst) {
console.error(
"Protocol out of sync! chunk.is_first=" + chunk.is_first +
" isExpectingFirst=" + isExpectingFirst);
return false;
}
if (chunk.is_first) {
this.total_size_bytes = chunk.total_size_bytes;
this.received_size_bytes = 0;
} else {
if (this.received_size_bytes != chunk.offset_bytes) {
console.error(
"Protocol out of sync! received_size_bytes=" +
this.received_size_bytes + " chunk.offset_bytes=" + chunk.offset_bytes);
return false;
}
if (this.received_size_bytes + chunk.data.length > this.total_size_bytes) {
console.error(
"Protocol out of sync! received_size_bytes=" + this.received_size_bytes +
" chunk.data.length=" + chunk.data.length +
" total_size_bytes=" + this.total_size_bytes);
return false;
}
}
debugLog("Received (" + this.received_size_bytes + " / " +
this.total_size_bytes + " bytes)");
debugLog("Payload size: " + chunk.data.length);
this.received_size_bytes += chunk.data.length;
var isLastChunk = (this.received_size_bytes == this.total_size_bytes);
var isLastChunkZeroTerminated = undefined;
if (isLastChunk) {
isLastChunkZeroTerminated = (chunk.data[chunk.data.length - 1] === 0);
}
// Copy the received data over:
var end = isLastChunk ? chunk.data.length - 1 : chunk.data.length;
for (var i = 0; i < end; i++) {
this.utf8_json_string += String.fromCharCode(chunk.data[i]);
}
if (isLastChunk) {
if (isLastChunkZeroTerminated) {
var json_string = utf8.decode(this.utf8_json_string);
var data;
try {
data = JSON.parse(json_string);
} catch (e) {
console.error(
"Dropping message, failed to parse JSON with error: " + e +
" (json_string=" + json_string + ")");
}
if (data !== undefined) {
_callHandlersForEvent('message', data);
}
} else {
console.error("Last Chunk wasn't zero terminated! Dropping message.");
}
this.utf8_json_string = "";
}
return true;
}
}
/*****************************************************************************
* postMessage() Session Control Protocol
****************************************************************************/
var ControlStateDisconnected = "ControlStateDisconnected";
var ControlStateAwaitingResetCompleteRemoteInitiated = "ControlStateAwaitingResetCompleteRemoteInitiated";
var ControlStateAwaitingResetCompleteLocalInitiated = "ControlStateAwaitingResetCompleteLocalInitiated";
var ControlStateSessionOpen = "ControlStateSessionOpen";
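// State machine summary: a "ready" event (re)starts negotiation with a local
// ResetRequest; an incoming ResetRequest restarts it from the remote side; a
// valid ResetComplete exchange in either direction opens the session.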
var ControlKeyResetRequest = "ControlKeyResetRequest";
var ControlKeyResetComplete = "ControlKeyResetComplete";
var ControlKeyChunk = "ControlKeyChunk";
var ControlKeyUnsupportedError = "ControlKeyUnsupportedError";
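// ResetComplete payload layout (6 bytes): [0] min_version, [1] max_version,
// [2..3] max_tx_chunk_size (big-endian), [4..5] max_rx_chunk_size (big-endian).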
function _unpackResetCompleteMessage(data) {
debugLog("Got ResetComplete: " + data);
return {
min_version : data[0],
max_version : data[1],
max_tx_chunk_size : (data[2] << 8) | (data[3]),
max_rx_chunk_size : (data[4] << 8) | (data[5]),
};
};
function _unpackChunk(data) {
//debugLog("Got Chunk: " + data);
if (data.length <= 4) {
console.error("Chunk data too short to be valid!");
return;
}
var is_first_bit = (1 << 7);
var is_first = (is_first_bit === (data[3] & is_first_bit));
var chunk = {
is_first : is_first
};
var msbyte = (~is_first_bit) & data[3];
var num31bits = (msbyte << 24) | (data[2] << 16) | (data[1] << 8) | data[0];
if (is_first) {
chunk.total_size_bytes = num31bits;
} else {
chunk.offset_bytes = num31bits;
}
chunk.data = data.slice(4);
return chunk;
}
function _remoteProtocolValidateAndSet(remote) {
debugLog("Remote min: " + remote.min_version);
debugLog("Remote max: " + remote.max_version);
if (remote.min_version == undefined || remote.max_version == undefined ||
remote.min_version > PROTOCOL.max_version || remote.max_version < PROTOCOL.min_version) {
return false;
}
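// Negotiate the intersection: the highest version both sides support, with
// our transmit chunk size capped by the remote's receive limit and vice versa.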
_control.protocol = {
version : Math.min(remote.max_version, PROTOCOL.max_version),
tx_chunk_size : Math.min(remote.max_rx_chunk_size, PROTOCOL.max_tx_chunk_size),
rx_chunk_size : Math.min(remote.max_tx_chunk_size, PROTOCOL.max_rx_chunk_size),
};
return true;
};
function _sendControlMessage(msg) {
_out.sendControl(msg);
}
function _controlSendResetComplete() {
var data = new Uint8Array(6);
data[0] = PROTOCOL.min_version;
data[1] = PROTOCOL.max_version;
data[2] = PROTOCOL.max_tx_chunk_size >> 8;
data[3] = PROTOCOL.max_tx_chunk_size;
data[4] = PROTOCOL.max_rx_chunk_size >> 8;
data[5] = PROTOCOL.max_rx_chunk_size;
_sendControlMessage({ ControlKeyResetComplete : arrayFromUint8Array(data) });
}
function _controlSendResetRequest() {
_sendControlMessage({ ControlKeyResetRequest : 0 });
}
function _controlSendUnsupportedError() {
_sendControlMessage({ ControlKeyUnsupportedError : 0 });
}
var ControlHandlers = {
ControlStateDisconnected : function(payload) {
},
ControlStateAwaitingResetCompleteRemoteInitiated : function(payload) {
if (ControlKeyResetComplete in payload) {
var remote_protocol = _unpackResetCompleteMessage(payload[ControlKeyResetComplete]);
// NOTE: This should *always* be true, we should never receive a
// ResetComplete response from the Remote in this state since it already
// knows it is unsupported
if (_remoteProtocolValidateAndSet(remote_protocol)) {
_control.enter(ControlStateSessionOpen);
}
} else if (ControlKeyResetRequest in payload) {
_control.enter(ControlStateAwaitingResetCompleteRemoteInitiated); // Re-enter this state
} else if (ControlKeyChunk in payload) {
_control.enter(ControlStateAwaitingResetCompleteLocalInitiated);
} else if (ControlKeyUnsupportedError in payload) {
throw Error("Unsupported protocol error: " + payload[ControlKeyUnsupportedError]);
}
},
ControlStateAwaitingResetCompleteLocalInitiated : function(payload) {
if (ControlKeyResetComplete in payload) {
var remote_protocol = _unpackResetCompleteMessage(payload[ControlKeyResetComplete]);
debugLog("Remote Protocol: " + remote_protocol);
if (_remoteProtocolValidateAndSet(remote_protocol)) {
debugLog("OK Remote protocol...");
_controlSendResetComplete();
_control.enter(ControlStateSessionOpen);
} else {
_controlSendUnsupportedError();
}
} else {
; // Ignore, we're in this state because we already sent a ResetRequest
}
},
ControlStateSessionOpen : function(payload) {
if (ControlKeyChunk in payload) {
var chunk = _unpackChunk(payload[ControlKeyChunk]);
if (false === _in.handleChunkReceived(chunk)) {
_control.enter(ControlStateAwaitingResetCompleteLocalInitiated);
}
} else if (ControlKeyResetRequest in payload) {
_control.enter(ControlStateAwaitingResetCompleteRemoteInitiated);
} else {
// FIXME: This could be an UnsupportedError, we probably don't want to
// keep on trying to negotiate protocol
_control.enter(ControlStateAwaitingResetCompleteLocalInitiated);
}
},
};
var ControlTransitions = {
ControlStateDisconnected : function(from_state) {
_control.resetProtocol();
_control.state = ControlStateAwaitingResetCompleteRemoteInitiated;
},
ControlStateAwaitingResetCompleteRemoteInitiated : function(from_state) {
_control.resetProtocol();
_control.state = ControlStateAwaitingResetCompleteRemoteInitiated;
_controlSendResetComplete();
},
ControlStateAwaitingResetCompleteLocalInitiated : function(from_state) {
if (from_state != ControlStateAwaitingResetCompleteLocalInitiated) {
// Coming from elsewhere, send the ResetRequest
_controlSendResetRequest();
}
_control.resetProtocol();
_control.state = ControlStateAwaitingResetCompleteLocalInitiated;
},
ControlStateSessionOpen : function(from_state) {
_control.state = ControlStateSessionOpen;
_callHandlersForEvent('postmessageconnected');
},
};
var PROTOCOL = {
min_version : 1,
max_version : 1,
max_tx_chunk_size : 1000,
max_rx_chunk_size : 1000,
};
var _control = {
state : ControlStateDisconnected,
handle : function(msg) {
debugLog("Handle " + this.state + "(" + JSON.stringify(msg.payload) + "}");
ControlHandlers[this.state](msg.payload);
},
enter : function(to_state) {
debugLog("Enter " + this.state + " ===> " + to_state);
var prev_state = this.state;
ControlTransitions[to_state](this.state);
if (prev_state == ControlStateSessionOpen && to_state != ControlStateSessionOpen) {
_callHandlersForEvent('postmessagedisconnected');
}
},
isSessionOpen: function() {
return (this.state === ControlStateSessionOpen);
},
resetProtocol: function() {
this.protocol = {
version : 0,
tx_chunk_size : 0,
rx_chunk_size : 0,
};
},
protocol : {
version : 0,
tx_chunk_size : 0,
rx_chunk_size : 0,
},
};
__Pebble.addEventListener('appmessage', function(msg) {
_control.handle(msg);
});
__Pebble.addEventListener('ready', function(e) {
_control.enter(ControlStateAwaitingResetCompleteLocalInitiated);
});
})();
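To make the wrapped surface concrete, here is a small hypothetical sketch (not part of this commit) of phone-side app code driving it; the event names and the shape of `event.data` follow the handlers defined above:

```
// Hypothetical pkjs app code built on the wrapper's postMessage API:
Pebble.on('postmessageconnected', function(event) {
  // Session is open; queued and new messages can now be delivered.
  Pebble.postMessage({greeting: 'hello watch'});
});

Pebble.on('message', function(event) {
  // event.data is the JSON-decoded object sent by the watch side:
  console.log('Received: ' + JSON.stringify(event.data));
});

Pebble.on('postmessageerror', function(event) {
  // event.data is the object that could not be delivered:
  console.log('Delivery failed for: ' + JSON.stringify(event.data));
});
```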

@@ -0,0 +1,36 @@
/**
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function(p) {
  if (p === undefined) {
    console.error('Pebble object not found!?');
    return;
  }

  // Aliases:
  p.on = p.addEventListener;
  p.off = p.removeEventListener;

  // For Android (WebView-based) pkjs, print stacktrace for uncaught errors:
  if (typeof window !== 'undefined' && window.addEventListener) {
    window.addEventListener('error', function(event) {
      if (event.error && event.error.stack) {
        console.error('' + event.error + '\n' + event.error.stack);
      }
    });
  }
})(Pebble);

25
sdk/include/rocky.c Normal file
@@ -0,0 +1,25 @@
/*
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <pebble.h>

extern bool rocky_event_loop_with_resource(uint32_t resource_id);

int main(void) {
  Window *window = window_create();
  window_stack_push(window, false);

  rocky_event_loop_with_resource(RESOURCE_ID_JS_SNAPSHOT);
}

17
sdk/include/rocky.js Normal file
@@ -0,0 +1,17 @@
/**
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
module.exports = _rocky;

@@ -0,0 +1,53 @@
ENTRY(main)

MEMORY
{
    APP (rwx) : ORIGIN = 0, LENGTH = @MAX_APP_MEMORY_SIZE@
}

SECTIONS
{
    .header :
    {
        KEEP(*(.pbl_header))
    } > APP

    /* -- DO NOT ADD ANY NEW SECTIONS HERE AND DO NOT CHANGE THE ALIGNMENT -- */
    /* The GNU build ID is tacked to the end of the PebbleProcessInfo struct: */
    .note.gnu.build-id ALIGN(1) : {
        PROVIDE(BUILD_ID = .);
        KEEP(*(.note.gnu.build-id))
    } > APP

    .text :
    {
        *(.text)
        *(.text.*)
        *(.rodata)
        *(.rodata*)
    } > APP

    .data :
    {
        KEEP(*(.data))
        *(.data.*)
    } > APP

    .bss :
    {
        *(.bss)
        *(.bss.*)
    } > APP

    /DISCARD/ :
    {
        libc.a ( * )
        libm.a ( * )
        libgcc.a ( * )
        *(.eh_frame)
    }
}
/* vim:filetype=ld */

9
sdk/readme.txt Normal file
@@ -0,0 +1,9 @@
This is the source directory for the SDK. As part of a normal waf build, this directory is mostly copied into the tintin/build/sdk directory. Scripts from tintin/tools/generate_native_sdk are also used to place other files into that directory based on auto-generated export tables pulled from the source.
To export a symbol to be usable from the SDK, add it to tintin/tools/generate_native_sdk/exported_symbols.json
The wscript in this directory is used to build files into the tintin/build/sdk directory. src_wscript is the build script that becomes tintin/build/sdk/wscript, which is to be used by app developers to build their apps.
For info on actually building apps, see src_readme.txt (the readme for the output redistributable SDK).

13
sdk/sdk_package.json Normal file
@@ -0,0 +1,13 @@
{
  "name": "pebble-sdk",
  "author": "Pebble Technology",
  "version": "1.0.0",
  "dependencies": {
    "json-loader": "^0.5.4",
    "restrict-resource-webpack-plugin": "^1.0.0",
    "rocky-lint": "^2.0.0",
    "utf8": "^2.1.1",
    "webpack": "^1.1.3",
    "webpack-fail-plugin": "^1.0.5"
  }
}

3
sdk/sdk_requirements.txt Normal file
@@ -0,0 +1,3 @@
freetype-py==1.0
sh==1.08
pypng==0.0.17

@@ -0,0 +1,42 @@
/**
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
module.exports = {
  "env": {
    "browser": true,
    "commonjs": true,
    "es6": true
  },
  "extends": "eslint:recommended",
  "rules": {
    "indent": [
      "error",
      2
    ],
    "linebreak-style": [
      "error",
      "unix"
    ],
    "quotes": [
      "error",
      "single"
    ],
    "semi": [
      "error",
      "always"
    ]
  }
};

1
sdk/tests/include/.gitignore vendored Normal file
@@ -0,0 +1 @@
node_modules

@@ -0,0 +1,50 @@
# sdk/include/*.js unit testing how-to
This folder contains tests for the .js code in sdk/include.
## Installing dependencies
1. `cd sdk/tests/include`
2. `npm install`
## Running tests
1. `cd sdk/tests/include`
2. `npm test`: this runs the tests using the [mocha](http://mochajs.org/) test runner.

You should see output similar to this:
```
$ npm test
> pebble-pkjs-tests@1.0.0 test /Users/martijn/tintin/sdk/tests/include
> NODE_PATH=../include ./node_modules/mocha/bin/mocha *.js
Pebble
interprets received postMessage API data as UTF-8
✓ interprets [34,34] as ""
✓ interprets [34,240,159,146,169,34] as "💩"
✓ interprets [34,237,160,181,237,188,128,34] as {}
✓ interprets [34,196,145,34] as "đ"
✓ interprets [34,224,160,149,34] as "ࠕ"
encodes sent postMessage API data as UTF-8
sendAppMessage: [object Object]
✓ encodes "" as [34,34,0]
sendAppMessage: [object Object]
✓ encodes "💩" as [34,240,159,146,169,34,0]
sendAppMessage: [object Object]
✓ encodes "đ" as [34,196,145,34,0]
sendAppMessage: [object Object]
✓ encodes "ࠕ" as [34,224,160,149,34,0]
9 passing (25ms)
```
## Linting the test code
1. `cd sdk/tests/include`
2. `npm run-script lint`
## Adding tests
* You can add `test_xyz.js` files in the `tests` folder. They will automatically get picked up by the test runner; a minimal sketch follows after this list.
* If you need a mock for the global `Pebble` object, check out `pebble-mock.js`. It's probably worth using and extending that rather than re-inventing the wheel.
* When adding additional dependencies (node packages), make sure to install them using `npm install --save-dev <PACKAGE_NAME>` so that they get added to the `devDependencies` in the `package.json` file.
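As a starting point, here is a minimal hypothetical `test_xyz.js` sketch (not part of this commit); it relies only on the hooks visible in `pebble-mock.js`, which currently knows just the `ready` and `appmessage` events:

```
/* eslint-env mocha */
const assert = require('assert');

describe('example', () => {
  beforeEach(() => {
    // Build a fresh Pebble mock for every test (see pebble-mock.js):
    const PebbleMockConstructor = require('./pebble-mock.js');
    global.Pebble = new PebbleMockConstructor();
  });

  it('delivers the ready event to registered listeners', (done) => {
    global.Pebble.addEventListener('ready', (event) => {
      assert.equal(event.name, 'ready');
      done();
    });
    global.Pebble.handleEvent({name: 'ready'});
  });
});
```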

@@ -0,0 +1,23 @@
{
  "name": "pebble-pkjs-tests",
  "version": "1.0.0",
  "description": "Unit tests for .js pkjs assets",
  "main": "index.js",
  "dependencies": {
    "utf8": "^2.1.1"
  },
  "devDependencies": {
    "eslint": "^2.10.2",
    "eslint-plugin-import": "^1.11.1",
    "eslint-plugin-jsx-a11y": "^2.0.1",
    "mocha": "^2.5.3",
    "simple-mock": "^0.7.0",
    "unroll": "^1.1.0"
  },
  "scripts": {
    "test": "NODE_PATH=./node_modules ./node_modules/mocha/bin/mocha *.js",
    "lint": "./node_modules/eslint/bin/eslint.js -c .eslintrc.js *.js"
  },
  "author": "",
  "license": "© Pebble Technology Corp."
}

@@ -0,0 +1,50 @@
/**
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* eslint-env mocha */
/* eslint func-names: 0 */
/* eslint no-console: 0 */
// Constructor to build a mock for the global Pebble object:
module.exports = function() {
  const simple = require('simple-mock');
  const assert = require('assert');

  var eventHandlers = {
    appmessage: [],
    ready: []
  };

  simple.mock(this, 'addEventListener', (event_name, handler) => {
    assert(event_name in eventHandlers, '\'' + event_name + '\' not known');
    eventHandlers[event_name].push(handler);
  });

  simple.mock(this, 'handleEvent', (event) => {
    assert(event.name in eventHandlers, '\'' + event.name + '\' not known');
    for (let handler of eventHandlers[event.name]) {
      handler(event);
    }
  });

  simple.mock(this, 'sendAppMessage', (msg, complCb, errCb) => {
    console.log(
      'sendAppMessage: ' + msg + ' complCb: ' + complCb + ' errCb: ' + errCb);
    if (complCb) {
      complCb(msg);
    }
  });
};

@@ -0,0 +1,457 @@
/**
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* eslint-env mocha */
/* eslint func-names: 0 */
const assert = require('assert');
const unroll = require('unroll');
unroll.use(it);
// Override setTimeout() to fire immediately:
var origSetTimeout = setTimeout;
setTimeout = function(f, t) {
origSetTimeout(f.bind(undefined), 0);
};
describe('Pebble', () => {
var mockPebble;
const simulateReceivingAppMessageEvent = (payload) => {
const appMessageEvent = {
name: 'appmessage',
payload: payload
};
global.Pebble.handleEvent(appMessageEvent);
};
const enterSessionOpen = () => {
global.Pebble.handleEvent({ name : "ready" });
var data = new Uint8Array(6);
data[0] = 1;
data[1] = 3;
data[2] = 0;
data[3] = 155;
data[4] = 0;
data[5] = 155;
simulateReceivingAppMessageEvent({ 'ControlKeyResetComplete' : Array.from(data) });
mockPebble.sendAppMessage.reset();
};
const createChunk = (offset, size, data) => {
if (offset == 0) { // First msg
var isFirst = (1 << 7);
var n = size + 1;
} else {
var isFirst = 0;
var n = offset;
}
var rv = [ (n) & 255,
(n >> 8) & 255,
(n >> 16) & 255,
((n >> 24) & ~(1 << 7)) | isFirst ];
Array.prototype.push.apply(rv, data.slice(offset, offset + size));
if (offset + size == data.length) {
rv.push(0);
}
return { "ControlKeyChunk" : rv };
};
const simulateReceivingPostMessageChunk = () => {
var data = '{ "msg_num" : 0 }'.split('').map(function(x) { return x.charCodeAt(0); });
var chunk = createChunk(0, data.length, data);
simulateReceivingAppMessageEvent(chunk);
};
beforeEach(() => {
// Create a new mock for the Pebble global object for each test:
const PebbleMockConstructor = require('./pebble-mock.js');
global.Pebble = new PebbleMockConstructor();
// Keep a reference to the mock that will be "wrapped" as soon as _pkjs_message_wrapper.js
// is loaded...
mockPebble = global.Pebble;
// Reload it to 'patch' the Pebble object:
const message_js_path = '../../include/_pkjs_message_wrapper.js';
delete require.cache[require.resolve(message_js_path)];
require(message_js_path);
enterSessionOpen();
});
/****************************************************************************
* Message Encoding
***************************************************************************/
describe('interprets received postMessage API data as UTF-8', () => {
unroll('interprets #utf8_data as #result', (done, fixture) => {
global.Pebble.on('message', (event) => {
assert.equal(event.type, 'message');
assert.equal(event.data, fixture.result);
done();
});
const payload = createChunk(0, fixture.utf8_data.length, fixture.utf8_data);
if (fixture.result instanceof Error) {
assert.throws(() => {
simulateReceivingAppMessageEvent(payload);
}, typeof(fixture.result), fixture.result.message);
done();
} else {
simulateReceivingAppMessageEvent(payload);
}
}, [
['utf8_data', 'result'],
// empty string:
[[34, 34], ''],
// Pile of Poo, in double quotes:
[[34, 240, 159, 146, 169,34], '💩'],
// Surrogates are illegal in UTF-8:
[[34, 0xED, 0xA0, 0xB5, 0xED, 0xBC, 0x80, 34], Error('Lone surrogate U+D835 is not a scalar value')],
// 2-byte code point, in double quotes:
[[34, 196, 145, 34], '\u0111'],
// 3-byte codepoint, in double quotes:
[[34, 0xE0, 0xA0, 0x95, 34], '\u0815']
]);
});
describe('encodes sent postMessage API data as UTF-8', () => {
unroll('encodes #input as #utf8_data', (done, fixture) => {
global.Pebble.postMessage(fixture.input);
assert.equal(mockPebble.sendAppMessage.callCount, 1);
const lastAppMessage = mockPebble.sendAppMessage.lastCall.args[0];
assert.deepEqual(lastAppMessage['ControlKeyChunk'].slice(4), fixture.utf8_data);
done();
}, [
['input', 'utf8_data'],
// empty string:
['', [34, 34, 0]],
// Pile of Poo, in double quotes:
['💩', [34, 240, 159, 146, 169, 34, 0]],
// 2-byte code point, in double quotes:
['\u0111', [34, 196, 145, 34, 0]],
// 3-byte codepoint, in double quotes:
['\u0815', [34, 0xE0, 0xA0, 0x95, 34, 0]]
]);
});
/****************************************************************************
* Message Handlers
***************************************************************************/
describe('Ensure that AppMessage is blocked', () => {
it('tries to register a Pebble.on("appmessage") handler', (done) => {
assert.throws(() => {
global.Pebble.on('appmessage', (e) => {
assert(0, "Should not have been called");
});
}, /not supported/);
// If this results in our callback being called, we'll throw an Error().
simulateReceivingAppMessageEvent({ 'KEY' : 'DATA' });
done();
});
it('tries to Pebble.addEventListener("appmessage")', (done) => {
assert.throws(() => {
global.Pebble.addEventListener('appmessage', (e) => {
// This will be thrown if the eventlistener was registered
assert(0, "Should not have been called");
});
}, /not supported/);
// If this results in our callback being called, we'll throw an Error().
simulateReceivingAppMessageEvent({ 'KEY' : 'DATA' });
done();
});
it('tries to call Pebble.sendAppMessage()', (done) => {
assert.notStrictEqual(typeof global.Pebble.sendAppMessage, 'function');
assert.equal(global.Pebble.sendAppMessage, undefined);
done();
});
});
describe('registers multiple message handlers', () => {
unroll('registers #num_handlers handlers to receive #num_messages messages each', (done, fixture) => {
var callback_count = 0;
var handler = function(e) { ++callback_count; };
for (var h = 0; h < fixture.num_handlers; ++h) {
global.Pebble.on('message', handler);
}
for (var i = 0; i < fixture.num_messages; ++i) {
simulateReceivingPostMessageChunk();
}
assert.equal(callback_count, fixture.num_handlers * fixture.num_messages);
done();
}, [
[ 'num_handlers', 'num_messages' ],
[ 1, 1 ],
[ 2, 1 ],
[ 3, 2 ],
]);
});
describe('registers multiple message handlers, unsubscribes one', () => {
unroll('registers #num_handlers, then unregisters #num_unregister', (done, fixture) => {
var callback_count = 0;
var handler = function(e) { ++callback_count; };
for (var h = 0; h < fixture.num_handlers; ++h) {
global.Pebble.on('message', handler);
}
for (var u = 0; u < fixture.num_unregister; ++u) {
global.Pebble.off('message', handler);
}
simulateReceivingPostMessageChunk();
assert.equal(callback_count, fixture.num_handlers - fixture.num_unregister);
done();
}, [
[ 'num_handlers', 'num_unregister' ],
[ 4, 2 ],
[ 10, 10 ],
]);
});
describe('call Pebble.off("message", handler) from within that event handler', () => {
unroll('calling while #num_registered other handlers are registered', (done, fixture) => {
var callback_count = 0;
var handler = function(e) { ++callback_count; };
var remove_handler = function(e) { ++callback_count; global.Pebble.off('message', remove_handler); }
global.Pebble.on('message', remove_handler);
for (var i = 0; i < fixture.num_registered; ++i) {
global.Pebble.on('message', handler);
}
simulateReceivingPostMessageChunk();
assert.equal(callback_count, fixture.num_registered + 1);
// Now that the remove_handler has been removed, send another and make
// sure that we have one less called.
callback_count = 0;
simulateReceivingPostMessageChunk();
assert.equal(callback_count, fixture.num_registered);
done();
}, [
[ 'num_registered' ],
[ 0 ],
[ 1 ],
[ 10 ],
]);
});
/****************************************************************************
* postmessageerror event
***************************************************************************/
describe('postmessageerror Event', () => {
it('event.data is set to the object that was attempted to be sent', (done) => {
global.Pebble.handleEvent({ name : "ready" });
mockPebble.sendAppMessage.reset();
global.Pebble.on('postmessageerror', function(e) {
assert.deepEqual(e.data, {b: 'c'});
done();
});
var a = { b: 'c' };
global.Pebble.postMessage(a);
a.b = 'd'; // modify to test that a copy of 'a' is sent
});
});
/****************************************************************************
* postmessageconnected / postmessagedisconnected event
***************************************************************************/
describe('Connection Events', () => {
unroll('postmessageconnected. Start connected: #start_connected', (done, fixture) => {
var connected_call_count = 0;
if (!fixture.start_connected) {
// Disconnect
global.Pebble.handleEvent({ name : "ready" });
}
global.Pebble.on('postmessageconnected', function(e) {
assert.equal(e.type, 'postmessageconnected');
++connected_call_count;
});
enterSessionOpen(); // establish connection
if (fixture.start_connected) {
assert.equal(connected_call_count, 2);
} else {
assert.equal(connected_call_count, 1);
}
done();
}, [
[ 'start_connected' ],
[ true, ],
[ false, ],
]);
unroll('postmessagedisconnected. Start disconnected: #start_disconnected', (done, fixture) => {
var disconnected_call_count = 0;
if (fixture.start_disconnected) {
// Disconnect
global.Pebble.handleEvent({ name : "ready" });
}
global.Pebble.on('postmessagedisconnected', function(e) {
assert.equal(e.type, 'postmessagedisconnected');
++disconnected_call_count;
});
if (fixture.start_disconnected) {
// Need to establish a connection before we can disconnect
enterSessionOpen();
}
global.Pebble.handleEvent({ name : "ready" }); // Disconnect again
if (fixture.start_disconnected) {
assert.equal(disconnected_call_count, 2);
} else {
assert.equal(disconnected_call_count, 1);
}
done();
}, [
[ 'start_disconnected' ],
[ true, ],
[ false, ],
]);
});
/****************************************************************************
* Control Layer
***************************************************************************/
describe('Control Layer', () => {
it('Ready message => ResetRequest', (done) => {
global.Pebble.handleEvent({ name : "ready" });
assert.equal(mockPebble.sendAppMessage.callCount, 1);
assert('ControlKeyResetRequest' in mockPebble.sendAppMessage.lastCall.args[0]);
done();
});
it ('Disconnected => AwaitingResetCompleteLocalInitiated => SessionOpen', (done) => {
global.Pebble.handleEvent({ name : "ready" });
mockPebble.sendAppMessage.reset();
var data = new Uint8Array(6);
data[0] = 1;
data[1] = 3;
data[2] = 0;
data[3] = 155;
data[4] = 0;
data[5] = 155;
simulateReceivingAppMessageEvent({ 'ControlKeyResetComplete' : Array.from(data) });
assert.equal(mockPebble.sendAppMessage.callCount, 1);
assert('ControlKeyResetComplete' in mockPebble.sendAppMessage.lastCall.args[0]);
done();
});
it ('Disconnected => AwaitingResetCompleteLocalInitiated => UnsupportedError', (done) => {
global.Pebble.handleEvent({ name : "ready" });
mockPebble.sendAppMessage.reset();
var data = new Uint8Array(6);
data[0] = 155; // Unsupported min version
data[1] = 156; // Unsupported max version
data[2] = 0;
data[3] = 155;
data[4] = 0;
data[5] = 155;
simulateReceivingAppMessageEvent({ 'ControlKeyResetComplete' : Array.from(data) });
assert.equal(mockPebble.sendAppMessage.callCount, 1);
assert('ControlKeyUnsupportedError' in mockPebble.sendAppMessage.lastCall.args[0]);
done();
});
it ('SessionOpen => AwaitingResetCompleteRemoteInitiated => UnsupportedError => Error', (done) => {
simulateReceivingAppMessageEvent({ 'ControlKeyResetRequest' : 0 });
assert.equal(mockPebble.sendAppMessage.callCount, 1);
assert('ControlKeyResetComplete' in mockPebble.sendAppMessage.lastCall.args[0]);
try {
simulateReceivingAppMessageEvent({ 'ControlKeyUnsupportedError' : "Test Error" });
} catch (e) {
assert.equal("Error: Unsupported protocol error: Test Error", e.toString());
}
done();
});
it ('Retry sending control message, check max retries.', (done) => {
// override setTimeout
setTimeout = function(fn, delay) {
fn(); // Use a synchronous call here because we want to make sure that there
// is a maximum of 3 callbacks. If we do these asynchronously,
// there is no nice way to test this.
}
// Replace our sendAppMessage with one that will always call the error callback
_mockSendAppMessage = mockPebble.sendAppMessage;
mockPebble.sendAppMessage = function(msg, complCb, errCb) {
_mockSendAppMessage(msg, undefined, errCb);
errCb(msg);
};
simulateReceivingAppMessageEvent({ 'ControlKeyResetRequest' : 0 });
// Should be called 1 + 3 retries, no more.
assert.equal(_mockSendAppMessage.callCount, 4);
done();
});
it('Retry sending control message, asynch', (done) => {
// This test will fail due to timeout if retry isn't working correctly.
var _setTimeout = setTimeout;
setTimeout = function(fn, delay) {
_setTimeout(fn, 0);
}
_mockSendAppMessage = mockPebble.sendAppMessage;
mockPebble.sendAppMessage = function(msg, complCb, errCb) {
_mockSendAppMessage(msg, undefined, errCb);
if (_mockSendAppMessage.callCount == 4) {
// 4 calls is 1 + 3 retries. We're done here
done();
} else {
_setTimeout(errCb.bind(null, msg), 0);
}
};
simulateReceivingAppMessageEvent({ 'ControlKeyResetRequest' : 0 });
});
});
it('.postMessage(nonJSONable) should throw a TypeError', (done) => {
var expectedMsg =
"Argument at index 0 is not a JSON.stringify()-able object";
assert.throws(
() => { global.Pebble.postMessage(undefined); }, TypeError, expectedMsg);
assert.throws(
() => { global.Pebble.postMessage(() => {}); }, TypeError, expectedMsg);
done();
});
});

14
sdk/tools/__init__.py Normal file
View file

@ -0,0 +1,14 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

309
sdk/tools/inject_metadata.py Executable file
View file

@ -0,0 +1,309 @@
#!/usr/bin/env python
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import with_statement
from struct import pack, unpack
import os
import os.path
import sys
import time
from subprocess import Popen, PIPE
from shutil import copy2
from binascii import crc32
from pbpack import ResourcePack
import stm32_crc
# Pebble App Metadata Struct
# These are offsets of the PebbleProcessInfo struct in src/fw/app_management/pebble_process_info.h
HEADER_ADDR = 0x0 # 8 bytes
STRUCT_VERSION_ADDR = 0x8 # 2 bytes
SDK_VERSION_ADDR = 0xa # 2 bytes
APP_VERSION_ADDR = 0xc # 2 bytes
LOAD_SIZE_ADDR = 0xe # 2 bytes
OFFSET_ADDR = 0x10 # 4 bytes
CRC_ADDR = 0x14 # 4 bytes
NAME_ADDR = 0x18 # 32 bytes
COMPANY_ADDR = 0x38 # 32 bytes
ICON_RES_ID_ADDR = 0x58 # 4 bytes
JUMP_TABLE_ADDR = 0x5c # 4 bytes
FLAGS_ADDR = 0x60 # 4 bytes
NUM_RELOC_ENTRIES_ADDR = 0x64 # 4 bytes
UUID_ADDR = 0x68 # 16 bytes
RESOURCE_CRC_ADDR = 0x78 # 4 bytes
RESOURCE_TIMESTAMP_ADDR = 0x7c # 4 bytes
VIRTUAL_SIZE_ADDR = 0x80 # 2 bytes
STRUCT_SIZE_BYTES = 0x82
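# Illustrative only (not used by the build): any field can be read back with
# `unpack` and the offsets above, e.g. the 2-byte struct version
# ('app.bin' is a hypothetical path):
#
#   with open('app.bin', 'rb') as f:
#       f.seek(STRUCT_VERSION_ADDR)
#       (struct_version,) = unpack('<H', f.read(2))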
# Pebble App Flags
# These are PebbleAppFlags from src/fw/app_management/pebble_process_info.h
PROCESS_INFO_STANDARD_APP = (0)
PROCESS_INFO_WATCH_FACE = (1 << 0)
PROCESS_INFO_VISIBILITY_HIDDEN = (1 << 1)
PROCESS_INFO_VISIBILITY_SHOWN_ON_COMMUNICATION = (1 << 2)
PROCESS_INFO_ALLOW_JS = (1 << 3)
PROCESS_INFO_HAS_WORKER = (1 << 4)
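# The flags combine with bitwise OR; e.g. a watchface that also uses app JS
# would carry (PROCESS_INFO_WATCH_FACE | PROCESS_INFO_ALLOW_JS), i.e. a flags
# value of 9.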
# Max app size, including the struct and reloc table
# Note that even if the app is smaller than this, it still may be too big, as it needs to share this
# space with applib/ which changes in size from release to release.
MAX_APP_BINARY_SIZE = 0x10000
# This number is a rough estimate, but should not be less than the available space.
# Currently, app_state uses up a small part of the app space.
# See also APP_RAM in stm32f2xx_flash_fw.ld and APP in pebble_app.ld.
MAX_APP_MEMORY_SIZE = 24 * 1024
# This number is a rough estimate, but should not be less than the available space.
# Currently, worker_state uses up a small part of the worker space.
# See also WORKER_RAM in stm32f2xx_flash_fw.ld
MAX_WORKER_MEMORY_SIZE = 10 * 1024
ENTRY_PT_SYMBOL = 'main'
JUMP_TABLE_ADDR_SYMBOL = 'pbl_table_addr'
DEBUG = False
class InvalidBinaryError(Exception):
pass
def inject_metadata(target_binary, target_elf, resources_file, timestamp, allow_js=False,
has_worker=False):
if target_binary[-4:] != '.bin':
raise Exception("Invalid filename <%s>! The filename should end in .bin" % target_binary)
def get_nm_output(elf_file):
nm_process = Popen(['arm-none-eabi-nm', elf_file], stdout=PIPE)
# Popen.communicate returns a tuple of (stdout, stderr)
nm_output = nm_process.communicate()[0]
if not nm_output:
raise InvalidBinaryError()
nm_output = [ line.split() for line in nm_output.splitlines() ]
return nm_output
def get_symbol_addr(nm_output, symbol):
# nm output looks like the following...
#
# U _ITM_registerTMCloneTable
# 00000084 t jump_to_pbl_function
# U _Jv_RegisterClasses
# 0000009c T main
# 00000130 T memset
#
# We don't care about the lines that only have two columns; they're not functions.
for sym in nm_output:
if symbol == sym[-1] and len(sym) == 3:
return int(sym[0], 16)
raise Exception("Could not locate symbol <%s> in binary! Failed to inject app metadata" %
(symbol))
def get_virtual_size(elf_file):
""" returns the virtual size (static memory usage, .text + .data + .bss) in bytes """
readelf_bss_process = Popen("arm-none-eabi-readelf -S '%s'" % elf_file,
shell=True, stdout=PIPE)
readelf_bss_output = readelf_bss_process.communicate()[0]
# readelf -S output looks like the following...
#
# [Nr] Name Type Addr Off Size ES Flg Lk Inf Al
# [ 0] NULL 00000000 000000 000000 00 0 0 0
# [ 1] .header PROGBITS 00000000 008000 000082 00 A 0 0 1
# [ 2] .text PROGBITS 00000084 008084 0006be 00 AX 0 0 4
# [ 3] .rel.text REL 00000000 00b66c 0004d0 08 23 2 4
# [ 4] .data PROGBITS 00000744 008744 000004 00 WA 0 0 4
# [ 5] .bss NOBITS 00000748 008748 000054 00 WA 0 0 4
last_section_end_addr = 0
# Find the .bss section and calculate the size based on the end of the .bss section
for line in readelf_bss_output.splitlines():
if len(line) < 10:
continue
# Carve off the first column, since it sometimes has a space in it, which screws up
# the split. Two leading spaces, a square bracket, 2 digits (with space padding), and
# the closing square bracket add up to 6 characters.
line = line[6:]
columns = line.split()
if len(columns) < 6:
continue
if columns[0] == '.bss':
addr = int(columns[2], 16)
size = int(columns[4], 16)
last_section_end_addr = addr + size
elif columns[0] == '.data' and last_section_end_addr == 0:
addr = int(columns[2], 16)
size = int(columns[4], 16)
last_section_end_addr = addr + size
if last_section_end_addr != 0:
return last_section_end_addr
sys.stderr.write("Failed to parse ELF sections while calculating the virtual size\n")
sys.stderr.write(readelf_bss_output)
raise Exception("Failed to parse ELF sections while calculating the virtual size")
def get_relocate_entries(elf_file):
""" returns a list of all the locations requiring an offset"""
# TODO: insert link to the wiki page I'm about to write about PIC and relocatable values
entries = []
# get the .data locations
readelf_relocs_process = Popen(['arm-none-eabi-readelf', '-r', elf_file], stdout=PIPE)
readelf_relocs_output = readelf_relocs_process.communicate()[0]
lines = readelf_relocs_output.splitlines()
i = 0
reading_section = False
while i < len(lines):
if not reading_section:
# look for the next section
if lines[i].startswith("Relocation section '.rel.data"):
reading_section = True
i += 1 # skip the column title section
else:
if len(lines[i]) == 0:
# end of the section
reading_section = False
else:
entries.append(int(lines[i].split(' ')[0], 16))
i += 1
# get any Global Offset Table (.got) entries
readelf_relocs_process = Popen(['arm-none-eabi-readelf', '--sections', elf_file],
stdout=PIPE)
readelf_relocs_output = readelf_relocs_process.communicate()[0]
lines = readelf_relocs_output.splitlines()
for line in lines:
# We shouldn't need to do anything with the Procedure Linkage Table since we don't
# actually export functions
if '.got' in line and '.got.plt' not in line:
words = line.split(' ')
while '' in words:
words.remove('')
section_label_idx = words.index('.got')
addr = int(words[section_label_idx + 2], 16)
length = int(words[section_label_idx + 4], 16)
for i in range(addr, addr + length, 4):
entries.append(i)
break
return entries
nm_output = get_nm_output(target_elf)
try:
app_entry_address = get_symbol_addr(nm_output, ENTRY_PT_SYMBOL)
except Exception:
raise Exception("Missing app entry point! Must be `int main(void) { ... }` ")
jump_table_address = get_symbol_addr(nm_output, JUMP_TABLE_ADDR_SYMBOL)
reloc_entries = get_relocate_entries(target_elf)
statinfo = os.stat(target_binary)
app_load_size = statinfo.st_size
if resources_file is not None:
with open(resources_file, 'rb') as f:
pbpack = ResourcePack.deserialize(f, is_system=False)
resource_crc = pbpack.get_content_crc()
else:
resource_crc = 0
if DEBUG:
copy2(target_binary, target_binary + ".orig")
with open(target_binary, 'r+b') as f:
total_app_image_size = app_load_size + (len(reloc_entries) * 4)
if total_app_image_size > MAX_APP_BINARY_SIZE:
raise Exception("App image size is %u (app %u relocation table %u). Must be smaller "
"than %u bytes" % (total_app_image_size,
app_load_size,
len(reloc_entries) * 4,
MAX_APP_BINARY_SIZE))
def read_value_at_offset(offset, format_str, size):
f.seek(offset)
return unpack(format_str, f.read(size))
app_bin = f.read()
app_crc = stm32_crc.crc32(app_bin[STRUCT_SIZE_BYTES:])
[app_flags] = read_value_at_offset(FLAGS_ADDR, '<L', 4)
if allow_js:
app_flags = app_flags | PROCESS_INFO_ALLOW_JS
if has_worker:
app_flags = app_flags | PROCESS_INFO_HAS_WORKER
app_virtual_size = get_virtual_size(target_elf)
struct_changes = {
'load_size' : app_load_size,
'entry_point' : "0x%08x" % app_entry_address,
'symbol_table' : "0x%08x" % jump_table_address,
'flags' : app_flags,
'crc' : "0x%08x" % app_crc,
'num_reloc_entries': "0x%08x" % len(reloc_entries),
'resource_crc' : "0x%08x" % resource_crc,
'timestamp' : timestamp,
'virtual_size': app_virtual_size
}
def write_value_at_offset(offset, format_str, value):
f.seek(offset)
f.write(pack(format_str, value))
write_value_at_offset(LOAD_SIZE_ADDR, '<H', app_load_size)
write_value_at_offset(OFFSET_ADDR, '<L', app_entry_address)
write_value_at_offset(CRC_ADDR, '<L', app_crc)
write_value_at_offset(RESOURCE_CRC_ADDR, '<L', resource_crc)
write_value_at_offset(RESOURCE_TIMESTAMP_ADDR, '<L', timestamp)
write_value_at_offset(JUMP_TABLE_ADDR, '<L', jump_table_address)
write_value_at_offset(FLAGS_ADDR, '<L', app_flags)
write_value_at_offset(NUM_RELOC_ENTRIES_ADDR, '<L', len(reloc_entries))
write_value_at_offset(VIRTUAL_SIZE_ADDR, "<H", app_virtual_size)
# Write the reloc_entries past the end of the binary. This expands the size of the binary,
# but this new stuff won't actually be loaded into ram.
f.seek(app_load_size)
for entry in reloc_entries:
f.write(pack('<L', entry))
f.flush()
return struct_changes

138
sdk/tools/memory_reports.py Normal file
View file

@ -0,0 +1,138 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def _convert_bytes_to_kilobytes(number_bytes):
"""
Convert the input from bytes into kilobytes
:param number_bytes: the number of bytes to convert
:return: the input value converted to kilobytes
"""
NUMBER_BYTES_IN_KBYTE = 1024
return int(number_bytes) / NUMBER_BYTES_IN_KBYTE
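# e.g. _convert_bytes_to_kilobytes(65536) == 64; under Python 2 this is
# integer division, so sub-kilobyte remainders are truncated.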
def app_memory_report(platform_name, bin_type, app_size, max_ram, free_ram, resource_size=None,
max_resource_size=None):
"""
This method provides a formatted string for printing the memory usage of this binary to the
console.
:param platform_name: the name of the current HW platform being targeted
:param bin_type: the type of binary being built (app, lib, worker)
:param app_size: the size of the binary
:param max_ram: the maximum allowed size of the binary
:param free_ram: the amount of remaining memory
:param resource_size: the size of the resource pack
:param max_resource_size: the maximum allowed size of the resource pack
:return: a tuple containing the color for the string print, and the string to print
"""
LABEL = "-------------------------------------------------------\n{} {} MEMORY USAGE\n"
RESOURCE_SIZE = "Total size of resources: {} bytes / {}KB\n"
MEMORY_USAGE = ("Total footprint in RAM: {} bytes / {}KB\n"
"Free RAM available (heap): {} bytes\n"
"-------------------------------------------------------")
if resource_size and max_resource_size:
report = (LABEL.format(platform_name.upper(), bin_type.upper()) +
RESOURCE_SIZE.format(resource_size,
_convert_bytes_to_kilobytes(max_resource_size)) +
MEMORY_USAGE.format(app_size, _convert_bytes_to_kilobytes(max_ram), free_ram))
else:
report = (LABEL.format(platform_name.upper(), bin_type.upper()) +
MEMORY_USAGE.format(app_size, _convert_bytes_to_kilobytes(max_ram), free_ram))
return 'YELLOW', report
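# Illustrative call (the numbers are hypothetical):
#   color, report = app_memory_report('basalt', 'app', 52000, 65536, 13536)
# returns 'YELLOW' plus the report block; callers pass both to a colorized
# logger such as waf's Logs.pprint.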
def app_resource_memory_error(platform_name, resource_size, max_resource_size):
"""
This method provides a formatted error message for printing to the console when the resource
size exceeds the maximum resource size supported by the Pebble firmware.
:param platform_name: the name of the current HW platform being targeted
:param resource_size: the size of the resource pack
:param max_resource_size: the maximum allowed size of the resource pack
:return: a tuple containing the color for the string print, and the string to print
"""
report = ("======================================================\n"
"Build failed: {}\n"
"Error: Resource pack is too large ({}KB / {}KB)\n"
"======================================================\n".
format(platform_name,
_convert_bytes_to_kilobytes(resource_size),
_convert_bytes_to_kilobytes(max_resource_size)))
return 'RED', report
def app_appstore_resource_memory_error(platform_name, resource_size, max_appstore_resource_size):
"""
This method provides a formatted warning message for printing to the console when the resource
pack size exceeds the maximum allowed resource size for the appstore.
:param platform_name: the name of the current HW platform being targeted
:param resource_size: the size of the resource pack
:param max_appstore_resource_size: the maximum appstore-allowed size of the resource pack
:return: a tuple containing the color for the string print, and the string to print
"""
report = ("WARNING: Your {} app resources are too large ({}KB / {}KB). You will not be "
"able "
"to publish your app.\n".
format(platform_name,
_convert_bytes_to_kilobytes(resource_size),
_convert_bytes_to_kilobytes(max_appstore_resource_size)))
return 'RED', report
def bytecode_memory_report(platform_name, bytecode_size, bytecode_max):
"""
This method provides a formatted string for printing the memory usage for this Rocky bytecode
file to the console.
:param platform_name: the name of the current HW platform being targeted
:param bytecode_size: the size of the bytecode file, in bytes
:param bytecode_max: the max allowed size of the bytecode file, in bytes
:return: a tuple containing the color for the string print, and the string to print
"""
LABEL = "-------------------------------------------------------\n{} MEMORY USAGE\n"
BYTECODE_USAGE = ("Total size of snapshot: {}KB / {}KB\n"
"-------------------------------------------------------")
report = (LABEL.format(platform_name.upper()) +
BYTECODE_USAGE.format(_convert_bytes_to_kilobytes(bytecode_size),
_convert_bytes_to_kilobytes(bytecode_max)))
return 'YELLOW', report
def simple_memory_report(platform_name, bin_size, resource_size=None):
"""
This method provides a formatted string for printing the memory usage for this binary to the
console.
:param platform_name: the name of the current HW platform being targeted
:param bin_size: the size of the binary
:param resource_size: the size of the resource pack
:return: a tuple containing the color for the string print, and the string to print
"""
LABEL = "-------------------------------------------------------\n{} MEMORY USAGE\n"
RESOURCE_SIZE = "Total size of resources: {} bytes\n"
MEMORY_USAGE = ("Total footprint in RAM: {} bytes\n"
"-------------------------------------------------------")
if resource_size:
report = (LABEL.format(platform_name.upper()) +
RESOURCE_SIZE.format(resource_size) +
MEMORY_USAGE.format(bin_size))
else:
report = (LABEL.format(platform_name.upper()) +
MEMORY_USAGE.format(bin_size))
return 'YELLOW', report

112
sdk/tools/pebble_package.py Normal file
View file

@ -0,0 +1,112 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import argparse
import errno
import os
from shutil import rmtree
import zipfile
class MissingFileException(Exception):
pass
class DuplicatePackageFileException(Exception):
pass
def _calculate_file_size(path):
return os.stat(path).st_size
def _calculate_crc(path):
pass
class PebblePackage(object):
def __init__(self, package_filename):
self.package_filename = package_filename
self.package_files = {}
def add_file(self, name, file_path):
if not os.path.exists(file_path):
raise MissingFileException("The file '{}' does not exist".format(file_path))
if name in self.package_files and self.package_files.get(name) != file_path:
raise DuplicatePackageFileException("The file '{}' cannot be added to the package "
"because `{}` has already been assigned to `{}`".
format(file_path,
self.package_files.get(name),
name))
else:
self.package_files[name] = file_path
def pack(self, package_path=None):
with zipfile.ZipFile(os.path.join(package_path, self.package_filename), 'w') as zip_file:
for filename, file_path in self.package_files.iteritems():
zip_file.write(file_path, filename)
zip_file.comment = type(self).__name__
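# The archive comment records the concrete package class name; the
# command-line entry point at the bottom of this file uses it to pick
# the class to reinstantiate when unpacking.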
def unpack(self, package_path=''):
try:
rmtree(package_path)
except OSError as e:
if e.errno != errno.ENOENT:
raise e
with zipfile.ZipFile(self.package_filename, 'r') as zip_file:
zip_file.extractall(package_path)
class RockyPackage(PebblePackage):
def __init__(self, package_filename):
super(RockyPackage, self).__init__(package_filename)
def add_files(self, rockyjs, binaries, resources, pkjs, platforms):
for platform in platforms:
self.add_file(os.path.join(platform, rockyjs[platform]), rockyjs[platform])
self.add_file(os.path.join(platform, binaries[platform]), binaries[platform])
self.add_file(os.path.join(platform, resources[platform]), resources[platform])
self.add_file(pkjs, pkjs)
def write_manifest(self):
pass
class LibraryPackage(PebblePackage):
def __init__(self, package_filename="dist.zip"):
super(LibraryPackage, self).__init__(package_filename)
def add_files(self, includes, binaries, resources, js):
for include, include_path in includes.iteritems():
self.add_file(os.path.join('include', include), include_path)
for binary, binary_path in binaries.iteritems():
self.add_file(os.path.join('binaries', binary), binary_path)
for resource, resource_path in resources.iteritems():
self.add_file(os.path.join('resources', resource), resource_path)
for js_file, js_file_path in js.iteritems():
self.add_file(os.path.join('js', js_file), js_file_path)
def unpack(self, package_path='dist'):
super(LibraryPackage, self).unpack(package_path)
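# Sketch of intended use (paths are hypothetical and must exist on disk):
#   pkg = LibraryPackage()                      # defaults to 'dist.zip'
#   pkg.add_file('binaries/aplite/libfoo.a', 'build/aplite/libfoo.a')
#   pkg.pack(package_path='build')              # writes build/dist.zip
#   LibraryPackage('build/dist.zip').unpack()   # extracts into 'dist'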
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Manage Pebble packages")
parser.add_argument('command', type=str, help="Command to use")
parser.add_argument('filename', type=str, help="Path to your Pebble package")
args = parser.parse_args()
with zipfile.ZipFile(args.filename, 'r') as package:
cls = globals()[package.comment](args.filename)
getattr(cls, args.command)()

212
sdk/tools/rocky-lint/rocky.d.ts vendored Normal file
View file

@ -0,0 +1,212 @@
/**
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
declare namespace rocky {
// helper type to indicate that a commonly expected feature is planned but not implemented yet
interface IsNotImplementedInRockyYet {
_doesNotWork: any
}
interface Event {
type: string
}
interface DrawEvent extends Event {
context: CanvasRenderingContext2D
}
interface TickEvent extends Event {
date: Date
}
interface MemoryPressureEvent extends Event {
level: 'high';
}
interface MessageEvent extends Event {
data: any;
}
interface PostMessageConnectionEvent extends Event {
}
interface AnyEvent extends Event, DrawEvent, TickEvent, MemoryPressureEvent, MessageEvent, PostMessageConnectionEvent { }
interface CanvasRenderingContext2D {
canvas: CanvasElement
fillStyle: string
font: string // TODO list actually supported fonts
lineWidth: number
strokeStyle: string
textAlign: string // TODO list actually supported values
textBaseline: IsNotImplementedInRockyYet
arc(x: number, y: number, radius: number, startAngle: number, endAngle: number, anticlockwise?: boolean): void
arcTo(x1: IsNotImplementedInRockyYet, y1: number, x2: number, y2: number, radius: number): void
beginPath(): void
bezierCurveTo(cp1x: IsNotImplementedInRockyYet , cp1y: number, cp2x: number, cp2y: number, x: number, y: number): void
clearRect(x: number, y: number, w: number, h: number): void
closePath(): void
drawImage(image: IsNotImplementedInRockyYet, offsetX: number, offsetY: number, width?: number, height?: number, canvasOffsetX?: number, canvasOffsetY?: number, canvasImageWidth?: number, canvasImageHeight?: number): void
fill(fillRule?: string): void
fillRect(x: number, y: number, w: number, h: number): void
fillText(text: string, x: number, y: number, maxWidth?: number): void
lineTo(x: number, y: number): void
measureText(text: string): TextMetrics
moveTo(x: number, y: number): void
quadraticCurveTo(cpx: IsNotImplementedInRockyYet, cpy: number, x: number, y: number): void
rect(x: number, y: number, w: number, h: number): void
restore(): void
rotate(angle: IsNotImplementedInRockyYet): void
save(): void
scale(x: IsNotImplementedInRockyYet , y: number): void
setTransform(m11: IsNotImplementedInRockyYet, m12: number, m21: number, m22: number, dx: number, dy: number): void
stroke(): void
strokeRect(x: number, y: number, w: number, h: number): void
transform(m11: IsNotImplementedInRockyYet, m12: number, m21: number, m22: number, dx: number, dy: number): void
translate(x: IsNotImplementedInRockyYet , y: number): void
rockyFillRadial(x: number, y: number, innerRadius: number, outerRadius: number, startAngle: number, endAngle: number): void
}
interface TextMetrics {
width: number
height: number
}
interface CanvasElement {
clientWidth: number
clientHeight: number
unobstructedWidth: number
unobstructedHeight: number
unobstructedTop: number
unobstructedLeft: number
}
interface WatchInfo {
platform: string
model: string
language: string
firmware: { major: number, minor: number, patch: number, suffix: string }
}
interface UserPreferences {
contentSize: "small" | "medium" | "large" | "x-large"
}
interface Rocky {
on(eventName: "draw", eventListener: (event: DrawEvent) => void): void
on(eventName: "memorypressure", eventListener: (event: MemoryPressureEvent) => void): void
on(eventName: "message", eventListener: (event: MessageEvent) => void): void
on(eventName: "postmessageconnected", eventListener: (event: PostMessageConnectionEvent) => void): void
on(eventName: "postmessagedisconnected", eventListener: (event: PostMessageConnectionEvent) => void): void
on(eventName: "postmessageerror", eventListener: (event: MessageEvent) => void): void
on(eventName: "hourchange", eventListener: (event: TickEvent) => void): void
on(eventName: "minutechange", eventListener: (event: TickEvent) => void): void
on(eventName: "secondchange", eventListener: (event: TickEvent) => void): void
on(eventName: "daychange", eventListener: (event: TickEvent) => void): void
on(eventName: string, eventListener: (event: AnyEvent) => void): void
addEventListener(eventName: "draw", eventListener: (event: DrawEvent) => void): void
addEventListener(eventName: "memorypressure", eventListener: (event: MemoryPressureEvent) => void): void
addEventListener(eventName: "message", eventListener: (event: MessageEvent) => void): void
addEventListener(eventName: "postmessageconnected", eventListener: (event: PostMessageConnectionEvent) => void): void
addEventListener(eventName: "postmessagedisconnected", eventListener: (event: PostMessageConnectionEvent) => void): void
addEventListener(eventName: "postmessageerror", eventListener: (event: MessageEvent) => void): void
addEventListener(eventName: "hourchange", eventListener: (event: TickEvent) => void): void
addEventListener(eventName: "minutechange", eventListener: (event: TickEvent) => void): void
addEventListener(eventName: "secondchange", eventListener: (event: TickEvent) => void): void
addEventListener(eventName: "daychange", eventListener: (event: TickEvent) => void): void
addEventListener(eventName: string, eventListener: (event: AnyEvent) => void): void
off(eventName: "draw", eventListener: (event: DrawEvent) => void): void
off(eventName: "memorypressure", eventListener: (event: MemoryPressureEvent) => void): void
off(eventName: "message", eventListener: (event: MessageEvent) => void): void
off(eventName: "postmessageconnected", eventListener: (event: PostMessageConnectionEvent) => void): void
off(eventName: "postmessagedisconnected", eventListener: (event: PostMessageConnectionEvent) => void): void
off(eventName: "postmessageerror", eventListener: (event: MessageEvent) => void): void
off(eventName: "hourchange", eventListener: (event: TickEvent) => void): void
off(eventName: "minutechange", eventListener: (event: TickEvent) => void): void
off(eventName: "secondchange", eventListener: (event: TickEvent) => void): void
off(eventName: "daychange", eventListener: (event: TickEvent) => void): void
off(eventName: string, eventListener: (event: AnyEvent) => void): void
removeEventListener(eventName: "draw", eventListener: (event: DrawEvent) => void): void
removeEventListener(eventName: "memorypressure", eventListener: (event: MemoryPressureEvent) => void): void
removeEventListener(eventName: "message", eventListener: (event: MessageEvent) => void): void
removeEventListener(eventName: "postmessageconnected", eventListener: (event: PostMessageConnectionEvent) => void): void
removeEventListener(eventName: "postmessagedisconnected", eventListener: (event: PostMessageConnectionEvent) => void): void
removeEventListener(eventName: "postmessageerror", eventListener: (event: MessageEvent) => void): void
removeEventListener(eventName: "hourchange", eventListener: (event: TickEvent) => void): void
removeEventListener(eventName: "minutechange", eventListener: (event: TickEvent) => void): void
removeEventListener(eventName: "secondchange", eventListener: (event: TickEvent) => void): void
removeEventListener(eventName: "daychange", eventListener: (event: TickEvent) => void): void
removeEventListener(eventName: string, eventListener: (event: AnyEvent) => void): void
postMessage(message: any): void
requestDraw(): void
watchInfo: WatchInfo
userPreferences: UserPreferences
Event: Event
CanvasRenderingContext2D: CanvasRenderingContext2D
CanvasElement: CanvasElement
}
}
declare module 'rocky' {
var rocky: rocky.Rocky;
export = rocky
}
interface Console {
error(message?: string, ...optionalParams: any[]): void
log(message?: string, ...optionalParams: any[]): void
warn(message?: string, ...optionalParams: any[]): void
}
declare var console: Console;
interface clearInterval {
(handle: number): void
}
declare var clearInterval: clearInterval;
interface clearTimeout {
(handle: number): void
}
declare var clearTimeout: clearTimeout;
interface setInterval {
(handler: (...args: any[]) => void, timeout: number): number
}
declare var setInterval: setInterval;
interface setTimeout {
(handler: (...args: any[]) => void, timeout: number): number
}
declare var setTimeout: setTimeout;
interface Require {
(id: string): any
}
interface RockyRequire extends Require {
(id: 'rocky'): rocky.Rocky
}
declare var require: RockyRequire;
interface Module {
exports: any
}
declare var module: Module;
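// A minimal usage sketch against these declarations (runs in a Rocky.js
// app's JS entry file; the drawing calls are illustrative):
//
//   var rocky = require('rocky');
//   rocky.on('minutechange', function(event) {
//     rocky.requestDraw();
//   });
//   rocky.on('draw', function(event) {
//     var ctx = event.context;
//     ctx.clearRect(0, 0, ctx.canvas.clientWidth, ctx.canvas.clientHeight);
//     ctx.fillText('Hello', 10, 10);
//   });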

View file

@ -0,0 +1,7 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "Pebble JSON Schema",
"description": "A project containing a Pebble application",
"type": "object",
"$ref": "file_types.json#/appinfo-json"
}

View file

@ -0,0 +1,76 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "Pebble JSON Schema for Attributes",
"description": "Schema for each type of valid attribute in Pebble projects",
"appKeys": {
"type": "object",
"patternProperties": {
"^\\w*$": { "$ref": "data_types.json#/UInt32" }
},
"additionalProperties": false
},
"capabilities": {
"type": "array",
"items": { "enum": ["location", "configurable", "health"] },
"uniqueItems": true
},
"messageKeys": {
"oneOf": [
{ "$ref": "attributes.json#/appKeys" },
{ "$ref": "data_types.json#/identifierArray" }
]
},
"resources": {
"type": "object",
"properties": {
"media": {
"type": "array",
"items": {
"type": "object",
"oneOf": [
{ "$ref": "resource_types.json#/bitmap" },
{ "$ref": "resource_types.json#/deprecatedImageFormat" },
{ "$ref": "resource_types.json#/font" },
{ "$ref": "resource_types.json#/raw" }
]
},
"uniqueItems": true
},
"publishedMedia": {
"type": "array",
"items": {
"type": "object",
"oneOf": [
{ "$ref": "resource_types.json#/publishedMediaAlias" },
{ "$ref": "resource_types.json#/publishedMediaGlance" },
{ "$ref": "resource_types.json#/publishedMediaTimeline" }
]
},
"uniqueItems": true
}
},
"additionalProperties": false,
"dependencies": {
"publishedMedia": [ "media" ]
}
},
"sdkVersion": { "enum": [ "2", "3" ] },
"targetPlatforms": {
"type": "array",
"items": { "enum": [ "aplite", "basalt", "chalk", "diorite" ] },
"uniqueItems": true
},
"uuid": {
"type": "string",
"pattern": "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$"
},
"watchapp": {
"type": "object",
"properties": {
"watchface": { "type": "boolean" },
"hiddenApp": { "type": "boolean" },
"onlyShownOnCommunication": { "type": "boolean" }
},
"additionalProperties": false
}
}

View file

@ -0,0 +1,27 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "Pebble JSON Schema for Types",
"description": "Schema for complex data types in Pebble projects",
"UInt8": {
"type": "integer",
"minimum": 0,
"maximum": 255
},
"UInt32": {
"type": "integer",
"minimum": 0,
"maximum": 4294967295
},
"identifier": {
"type": "string",
"pattern": "^\\w*$"
},
"identifierArray": {
"type": "array",
"items": { "$ref": "#/identifier" }
},
"stringArray": {
"type": "array",
"items": { "type": "string" }
}
}

View file

@ -0,0 +1,67 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "Pebble JSON Schema for Project JSON Files",
"description": "Schema for supported JSON Pebble project files",
"package-json": {
"properties": {
"name": { "type": "string" },
"author": {
"description": "https://docs.npmjs.com/files/package.json#people-fields-author-contributors",
"oneOf": [
{
"type": "string",
"pattern": "^([^<(]+?)?[ \\\\t]*(?:<([^>(]+?)>)?[ \\\\t]*(?:\\\\(([^)]+?)\\\\)|$)"
},
{
"type": "object",
"properties": {
"name": { "type": "string" },
"email": { "type": "string" },
"url": { "type": "string" }
},
"additionalProperties": false
}
]
},
"version": { "type": "string" },
"keywords": { "$ref": "data_types.json#/stringArray" },
"private": { "type": "boolean" },
"dependencies": {
"type": "object",
"patternProperties": {
".": { "type": "string" }
},
"additionalProperties": false
},
"files": { "$ref": "data_types.json#/stringArray" },
"pebble": {
"type": "object",
"oneOf": [
{ "$ref": "project_types.json#/native-app" },
{ "$ref": "project_types.json#/rocky-app" },
{ "$ref": "project_types.json#/package" }
]
}
},
"required": [ "name", "author", "version", "pebble" ]
},
"appinfo-json": {
"properties": {
"uuid": { "$ref": "attributes.json#/uuid" },
"shortName": { "type": "string" },
"longName": { "type": "string" },
"companyName": { "type": "string" },
"versionCode": { "$ref": "data_types.json#/UInt8" },
"versionLabel": { "type": "string" },
"sdkVersion": { "$ref": "attributes.json#/sdkVersion" },
"targetPlatforms": { "$ref": "attributes.json#/targetPlatforms" },
"watchapp": { "$ref": "attributes.json#/watchapp" },
"appKeys": { "$ref": "attributes.json#/appKeys" },
"resources": { "$ref": "attributes.json#/resources" },
"capabilities": { "$ref": "attributes.json#/capabilities" },
"enableMultiJS": { "type": "boolean" },
"projectType": { "enum": [ "native", "pebblejs" ] }
},
"required": ["uuid", "longName", "companyName", "versionLabel"]
}
}

View file

@ -0,0 +1,7 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "Pebble JSON Schema",
"description": "A project containing a Pebble application",
"type": "object",
"$ref": "file_types.json#/package-json"
}

View file

@ -0,0 +1,43 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "Pebble JSON Schema for Project Types",
"description": "Schema for each type of valid Pebble project",
"native-app": {
"properties": {
"displayName": { "type": "string" },
"uuid": { "$ref": "attributes.json#/uuid" },
"sdkVersion": { "$ref": "attributes.json#/sdkVersion" },
"projectType": { "enum": [ "native" ] },
"enableMultiJS": { "type": "boolean" },
"targetPlatforms": { "$ref": "attributes.json#/targetPlatforms" },
"watchapp": { "$ref": "attributes.json#/watchapp" },
"capabilities": { "$ref": "attributes.json#/capabilities" },
"appKeys": { "$ref": "attributes.json#/appKeys" },
"messageKeys": { "$ref": "attributes.json#/messageKeys" }
},
"required": [ "displayName", "uuid", "sdkVersion" ]
},
"rocky-app": {
"properties": {
"displayName": { "type": "string" },
"uuid": { "$ref": "attributes.json#/uuid" },
"sdkVersion": { "$ref": "attributes.json#/sdkVersion" },
"projectType": { "enum": [ "rocky" ] },
"enableMultiJS": { "type": "boolean" },
"targetPlatforms": { "$ref": "attributes.json#/targetPlatforms" },
"watchapp": { "$ref": "attributes.json#/watchapp" },
"capabilities": { "$ref": "attributes.json#/capabilities" }
},
"required": [ "displayName", "uuid", "sdkVersion", "projectType" ]
},
"package": {
"properties": {
"sdkVersion": { "$ref": "attributes.json#/sdkVersion" },
"projectType": { "enum": [ "package" ] },
"targetPlatforms": { "$ref": "attributes.json#/targetPlatforms" },
"capabilities": { "$ref": "attributes.json#/capabilities" },
"messageKeys": { "$ref": "attributes.json#/messageKeys" }
},
"required": [ "sdkVersion", "projectType" ]
}
}

View file

@ -0,0 +1,84 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "Pebble JSON Schema for Resource Types",
"description": "Schema for each type of valid resource in Pebble projects",
"bitmap": {
"properties": {
"name": { "$ref": "data_types.json#/identifier" },
"type": { "enum": ["bitmap"] },
"file": { "type": "string" },
"menuIcon": { "type": "boolean" },
"targetPlatforms": { "$ref": "attributes.json#/targetPlatforms" },
"storageFormat": { "enum": [ "pbi", "png" ] },
"memoryFormat": {
"enum": [
"Smallest",
"SmallestPalette",
"1Bit",
"8Bit",
"1BitPalette",
"2BitPalette",
"4BitPalette"
]
},
"spaceOptimization": { "enum": [ "storage", "memory" ] }
}
},
"deprecatedImageFormat": {
"properties": {
"name": { "$ref": "data_types.json#/identifier" },
"type": { "enum": ["png", "pbi", "pbi8", "png-trans"] },
"file": { "type": "string" },
"menuIcon": { "type": "boolean" },
"targetPlatforms": { "$ref": "attributes.json#/targetPlatforms" }
},
"required": ["name", "type", "file"]
},
"font": {
"properties": {
"name": { "$ref": "data_types.json#/identifier" },
"type": { "enum": ["font"] },
"file": { "type": "string" },
"targetPlatforms": { "$ref": "attributes.json#/targetPlatforms" },
"characterRegex": { "type": "string" }
},
"required": ["name", "type", "file"]
},
"raw": {
"properties": {
"name": { "$ref": "data_types.json#/identifier" },
"type": { "enum": ["raw"] },
"file": { "type": "string" },
"targetPlatforms": { "$ref": "attributes.json#/targetPlatforms" }
},
"required": ["name", "type", "file"]
},
"publishedMediaItem": {
"name": { "$ref": "data_types.json#/identifier" },
"id": { "$ref": "data_types.json#/UInt32" },
"alias": { "$ref": "data_types.json#/identifier" },
"glance": { "$ref": "data_types.json#/identifier" },
"timeline": {
"type": "object",
"properties": {
"tiny": { "$ref": "data_types.json#/identifier" },
"small": { "$ref": "data_types.json#/identifier" },
"large": { "$ref": "data_types.json#/identifier" }
},
"required": [ "tiny" ]
}
},
"publishedMediaAlias": {
"properties": { "$ref": "#/publishedMediaItem" },
"required": ["name", "id", "alias"]
},
"publishedMediaGlance": {
"properties": { "$ref": "#/publishedMediaItem" },
"required": ["name", "id", "glance"]
},
"publishedMediaTimeline": {
"properties": { "$ref": "#/publishedMediaItem" },
"required": ["name", "id", "timeline"]
}
}

View file

@ -0,0 +1,40 @@
/**
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var fs = require('fs');
module.exports = function(source) {
// Set this loader to cacheable
this.cacheable();
// Whitelist files in the current project
var whitelisted_folders = [this.options.context];
// Whitelist files from the SDK-appended search paths
whitelisted_folders = whitelisted_folders.concat(this.options.resolve.root);
// Iterate over whitelisted file paths
for (var i=0; i<whitelisted_folders.length; i++) {
// If resource file is from a whitelisted path, return source
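// (~x is falsy only when x is -1, so ~indexOf(...) tests that the
// whitelisted folder path occurs within the resource path)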
if (~this.resourcePath.indexOf(fs.realpathSync(whitelisted_folders[i]))) {
return source;
}
}
// If the resource file is not from a whitelisted path, emit an error and fail the build
this.emitError("Requiring a file outside of the current project folder is not permitted.");
return "";
};

View file

@ -0,0 +1,96 @@
////////////////////////////////////////////////////////////////////////////////
// Template vars injected by process_js.py:
// boolean
const isSandbox = ${IS_SANDBOX};
// Array with absolute file path strings
const entryFilenames = ${ENTRY_FILENAMES};
// folder path string
const outputPath = ${OUTPUT_PATH};
// file name string
const outputFilename = ${OUTPUT_FILENAME};
// Array with absolute folder path strings
const resolveRoots = ${RESOLVE_ROOTS};
// Object, { alias1: 'path1', ... }
const resolveAliases = ${RESOLVE_ALIASES};
// null or Object with key 'sourceMapFilename'
const sourceMapConfig = ${SOURCE_MAP_CONFIG};
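// For illustration, hypothetical values after the Python substitution:
//   const isSandbox = true;
//   const entryFilenames = ['/project/src/pkjs/index.js'];
//   const outputPath = '/project/build';
//   const outputFilename = 'pebble-js-app.js';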
////////////////////////////////////////////////////////////////////////////////
// NOTE: Must escape dollar-signs, because this is a Python template!
const webpack = require('webpack');
module.exports = (() => {
// The basic config:
const config = {
entry: entryFilenames,
output: {
path: outputPath,
filename: outputFilename
},
target: 'node',
resolve: {
root: resolveRoots,
extensions: ['', '.js', '.json'],
alias: resolveAliases
},
resolveLoader: {
root: resolveRoots
}
};
if (sourceMapConfig) {
// Enable webpack's source map output:
config.devtool = 'source-map';
config.output.sourceMapFilename = sourceMapConfig.sourceMapFilename;
config.output.devtoolModuleFilenameTemplate = '[resource-path]';
config.output.devtoolFallbackModuleFilenameTemplate = '[resourcePath]?[hash]';
}
return config;
})();
module.exports.plugins = (() => {
const plugins = [
// Returns a non-zero exit code when webpack reports an error:
require('webpack-fail-plugin'),
// Includes _message_key_wrapper in every build to mimic old loader.js:
new webpack.ProvidePlugin({ require: '_message_key_wrapper' })
];
if (isSandbox) {
// Prevents using `require('evil_loader!mymodule')` to execute custom
// loader code during the webpack build.
const RestrictResourcePlugin = require('restrict-resource-webpack-plugin');
const plugin = new RestrictResourcePlugin(/!+/,
'Custom inline loaders are not permitted.');
plugins.push(plugin);
}
return plugins;
})();
module.exports.module = {
loaders: (() => {
const loaders = [{'test': /\.json$$/, 'loader': 'json-loader'}];
if (isSandbox) {
// See restricted-resource-loader.js, prevents loading files outside
// of the project folder, i.e. `require(../../not_your_business)`:
const restrictLoader = {
'test': /^.*/, 'loader': 'restricted-resource-loader'
};
loaders.push(restrictLoader);
}
return loaders;
})()
};

41
sdk/update-waf.sh Executable file
View file

@ -0,0 +1,41 @@
#!/bin/sh
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Run this script to update waf to a newer version and get rid of the
# files we do not need for Pebble SDK.
set -x
VERSION=1.7.11
DOWNLOAD="http://waf.googlecode.com/files/waf-$VERSION.tar.bz2"
TMPFILE=`mktemp -t waf-tar-bz`
# Remove existing waf folder
rm -fr waf
# Download and extract what we need from waf distrib
wget -O - $DOWNLOAD | tar -yx \
--include "waf-$VERSION/waf-light" \
--include "waf-$VERSION/waflib/*" \
--include "waf-$VERSION/wscript" \
--exclude "waf-$VERSION/waflib/extras" \
-s "/waf-$VERSION/waf/"
# Add some python magic for our lib to work
# (they will be copied in extras and require the init)
mkdir waf/waflib/extras
touch waf/waflib/extras/__init__.py

View file

@ -0,0 +1 @@
["pebble-tool>=4.4-beta4"]

343
sdk/waftools/pebble_sdk.py Normal file
View file

@ -0,0 +1,343 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from waflib.Configure import conf
from waflib.Errors import ConfigurationError
from waflib import Logs
import sdk_paths
from generate_appinfo import generate_appinfo_c
from process_sdk_resources import generate_resources
import report_memory_usage
from sdk_helpers import (configure_libraries, configure_platform, find_sdk_component,
get_target_platforms, truncate_to_32_bytes, validate_message_keys_object)
def _extract_project_info(conf, info_json, json_filename):
"""
Extract project info from "pebble" object, or copy configuration directly if read from
appinfo.json
:param conf: the ConfigurationContext
:param info_json: the JSON blob contained in appinfo.json or package.json
:return: JSON blob containing project information for build
"""
if 'pebble' in info_json:
project_info = info_json['pebble']
validate_message_keys_object(conf, project_info, 'package.json')
project_info['name'] = info_json['name']
project_info['shortName'] = project_info['longName'] = project_info['displayName']
# Validate version specified in package.json to avoid issues later
if not info_json['version']:
conf.fatal("Project is missing a version")
version = _validate_version(conf, info_json['version'])
project_info['versionLabel'] = version
if isinstance(info_json['author'], basestring):
project_info['companyName'] = (
info_json['author'].split('(', 1)[0].split('<', 1)[0].strip())
elif isinstance(info_json['author'], dict) and 'name' in info_json['author']:
project_info['companyName'] = info_json['author']['name']
else:
conf.fatal("Missing author name in project info")
elif 'package.json' == json_filename:
try:
with open(conf.path.get_src().find_node('appinfo.json').abspath(), 'r') as f:
info_json = json.load(f)
except AttributeError:
conf.fatal("Could not find Pebble project info in package.json and no appinfo.json file"
" exists")
project_info = info_json
validate_message_keys_object(conf, project_info, 'appinfo.json')
else:
project_info = info_json
validate_message_keys_object(conf, project_info, 'appinfo.json')
return project_info
def _generate_appinfo_c_file(task):
"""
This Task generates the appinfo.auto.c file that is included in binary metadata
:param task: the instance of this task
:return: N/A
"""
info_json = dict(getattr(task.generator.env, task.vars[0]))
info_json['shortName'] = truncate_to_32_bytes(info_json['shortName'])
info_json['companyName'] = truncate_to_32_bytes(info_json['companyName'])
current_platform = task.generator.env.PLATFORM_NAME
generate_appinfo_c(info_json, task.outputs[0].abspath(), current_platform)
def _write_appinfo_json_file(task):
"""
This task writes the content of the PROJECT_INFO environment variable to appinfo.json in the
build directory. PROJECT_INFO is generated from reading in either a package.json file or an
old-style appinfo.json file.
:param task: the task instance
:return: None
"""
appinfo = dict(getattr(task.generator.env, task.vars[0]))
capabilities = appinfo.get('capabilities', [])
for lib in dict(task.generator.env).get('LIB_JSON', []):
if 'pebble' in lib:
capabilities.extend(lib['pebble'].get('capabilities', []))
appinfo['capabilities'] = list(set(capabilities))
for key in task.env.BLOCK_MESSAGE_KEYS:
del appinfo['appKeys'][key]
if appinfo:
with open(task.outputs[0].abspath(), 'w') as f:
json.dump(appinfo, f, indent=4)
else:
task.generator.bld.fatal("Unable to find project info to populate appinfo.json file with")
def _validate_version(ctx, original_version):
"""
Validates the format of the version field in an app's project info, and strips off a
zero-valued patch version number, if it exists, to be compatible with the Pebble FW
:param ctx: the ConfigureContext object
:param version: the version provided in project info (package.json/appinfo.json)
:return: a MAJOR.MINOR version that is acceptable for Pebble FW
"""
version = original_version.split('.')
if len(version) > 3:
ctx.fatal("App versions must be of the format MAJOR or MAJOR.MINOR or MAJOR.MINOR.0. An "
"invalid version of {} was specified for the app. Try {}.{}.0 instead".
format(original_version, version[0], version[1]))
elif not (0 <= int(version[0]) <= 255):
ctx.fatal("An invalid or out of range value of {} was specified for the major version of "
"the app. The valid range is 0-255.".format(version[0]))
elif not (0 <= int(version[1]) <= 255):
ctx.fatal("An invalid or out of range value of {} was specified for the minor version of "
"the app. The valid range is 0-255.".format(version[1]))
elif len(version) > 2 and not (int(version[2]) == 0):
ctx.fatal("The patch version of an app must be 0, but {} was specified ({}). Try {}.{}.0 "
"instead.".
format(version[2], original_version, version[0], version[1]))
return version[0] + '.' + version[1]
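# e.g. _validate_version(ctx, '1.2') and _validate_version(ctx, '1.2.0') both
# return '1.2', while '1.2.3' and '1.2.3.4' abort the configure step.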
def options(opt):
"""
Specify the options available when invoking waf; uses OptParse
:param opt: the OptionContext object
:return: N/A
"""
opt.load('pebble_sdk_common')
opt.add_option('-t', '--timestamp', dest='timestamp',
help="Use a specific timestamp to label this package (ie, your repository's "
"last commit time), defaults to time of build")
def configure(conf):
"""
Configure the build using information obtained from a JSON file
:param conf: the ConfigureContext object
:return: N/A
"""
conf.load('pebble_sdk_common')
# This overrides the default config in pebble_sdk_common.py
if conf.options.timestamp:
conf.env.TIMESTAMP = conf.options.timestamp
conf.env.BUNDLE_NAME = "app_{}.pbw".format(conf.env.TIMESTAMP)
else:
conf.env.BUNDLE_NAME = "{}.pbw".format(conf.path.name)
# Read in package.json for environment configuration, or fallback to appinfo.json for older
# projects
info_json_node = (conf.path.get_src().find_node('package.json') or
conf.path.get_src().find_node('appinfo.json'))
if info_json_node is None:
conf.fatal('Could not find package.json')
with open(info_json_node.abspath(), 'r') as f:
info_json = json.load(f)
project_info = _extract_project_info(conf, info_json, info_json_node.name)
conf.env.PROJECT_INFO = project_info
conf.env.BUILD_TYPE = 'rocky' if project_info.get('projectType', None) == 'rocky' else 'app'
if conf.env.PROJECT_INFO.get('enableMultiJS', False):
if not conf.env.WEBPACK:
conf.fatal("'enableMultiJS' is set to true, but unable to locate webpack module at {} "
"Please set enableMultiJS to false, or reinstall the SDK.".
format(conf.env.NODE_PATH))
if conf.env.BUILD_TYPE == 'rocky':
conf.find_program('node nodejs', var='NODE',
errmsg="Unable to locate the Node command. "
"Please check your Node installation and try again.")
c_files = [c_file.path_from(conf.path.find_node('src'))
for c_file in conf.path.ant_glob('src/**/*.c')]
if c_files:
Logs.pprint('YELLOW', "WARNING: C source files are not supported for Rocky.js "
"projects. The following C files are being skipped: {}".
format(c_files))
if 'resources' in project_info and 'media' in project_info['resources']:
conf.env.RESOURCES_JSON = project_info['resources']['media']
if 'publishedMedia' in project_info['resources']:
conf.env.PUBLISHED_MEDIA_JSON = project_info['resources']['publishedMedia']
conf.env.REQUESTED_PLATFORMS = project_info.get('targetPlatforms', [])
conf.env.LIB_DIR = "node_modules"
get_target_platforms(conf)
# With new-style projects, check for libraries specified in package.json
if 'dependencies' in info_json:
configure_libraries(conf, info_json['dependencies'])
conf.load('process_message_keys')
# base_env is set to a shallow copy of the current ConfigSet for this ConfigureContext
base_env = conf.env
for platform in conf.env.TARGET_PLATFORMS:
# Create a deep copy of the `base_env` ConfigSet and set conf.env to a shallow copy of
# the resultant ConfigSet
conf.setenv(platform, base_env)
configure_platform(conf, platform)
# conf.env is set back to a shallow copy of the default ConfigSet stored in conf.all_envs['']
conf.setenv('')
def build(bld):
"""
This method is invoked from a project's wscript with the `ctx.load('pebble_sdk')` call and
sets up all of the task generators for the SDK. After all of the build methods have run,
the configured task generators will run, generating build tasks and managing dependencies.
See https://waf.io/book/#_task_generators for more details on task generator setup.
:param bld: the BuildContext object
:return: N/A
"""
bld.load('pebble_sdk_common')
# cached_env is set to a shallow copy of the current ConfigSet for this BuildContext
cached_env = bld.env
for platform in bld.env.TARGET_PLATFORMS:
# bld.env is set to a shallow copy of the ConfigSet labeled <platform>
bld.env = bld.all_envs[platform]
# Set the build group (set of TaskGens) to the group labeled <platform>
if bld.env.USE_GROUPS:
bld.set_group(bld.env.PLATFORM_NAME)
# Generate an appinfo file specific to the current platform
build_node = bld.path.get_bld().make_node(bld.env.BUILD_DIR)
bld(rule=_generate_appinfo_c_file,
target=build_node.make_node('appinfo.auto.c'),
vars=['PROJECT_INFO'])
# Generate an appinfo.json file for the current platform to bundle in a PBW
bld(rule=_write_appinfo_json_file,
target=bld.path.get_bld().make_node('appinfo.json'),
vars=['PROJECT_INFO'])
# Generate resources specific to the current platform
resource_node = None
if bld.env.RESOURCES_JSON:
try:
resource_node = bld.path.find_node('resources')
except AttributeError:
bld.fatal("Unable to locate resources at resources/")
# Adding the Rocky.js source file needs to happen before the setup of the Resource
# Generators
if bld.env.BUILD_TYPE == 'rocky':
rocky_js_file = bld.path.find_or_declare('resources/rocky-app.js')
rocky_js_file.parent.mkdir()
bld.pbl_js_build(source=bld.path.ant_glob(['src/rocky/**/*.js',
'src/common/**/*.js']),
target=rocky_js_file)
resource_node = bld.path.get_bld().make_node('resources')
bld.env.RESOURCES_JSON = [{'type': 'js',
'name': 'JS_SNAPSHOT',
'file': rocky_js_file.path_from(resource_node)}]
resource_path = resource_node.path_from(bld.path) if resource_node else None
generate_resources(bld, resource_path)
# Running `pbl_build` needs to happen after the setup of the Resource Generators so
# `report_memory_usage` is aware of the existence of the JS bytecode file
if bld.env.BUILD_TYPE == 'rocky':
rocky_c_file = build_node.make_node('src/rocky.c')
bld(rule='cp "${SRC}" "${TGT}"',
source=find_sdk_component(bld, bld.env, 'include/rocky.c'),
target=rocky_c_file)
# Check for the Rocky.js tooling script. (This is done in `build` to preserve the script
# as a node instead of as an absolute path, as would be required in `configure`; this
# keeps the signatures the same for both FW builds and SDK builds.)
if not bld.env.JS_TOOLING_SCRIPT:
bld.fatal("Unable to locate tooling for this Rocky.js app build. Please "
"try re-installing this version of the SDK.")
bld.pbl_build(source=[rocky_c_file],
target=build_node.make_node("pebble-app.elf"),
bin_type='rocky')
# bld.env is set back to a shallow copy of the original ConfigSet that was set when this `build`
# method was invoked
bld.env = cached_env
@conf
def pbl_program(self, *k, **kw):
"""
    This method is bound to the build context and is called by specifying `bld.pbl_program()`. We
    set the custom features `c`, `cprogram`, `pebble_cprogram` and `memory_usage` to run when this
    method is invoked.
:param self: the BuildContext object
:param k: none expected
:param kw:
source - the source C files to be built and linked
target - the destination binary file for the compiled source
:return: a task generator instance with keyword arguments specified
"""
kw['bin_type'] = 'app'
kw['features'] = 'c cprogram pebble_cprogram memory_usage'
kw['app'] = kw['target']
kw['resources'] = (
self.path.find_or_declare(self.env.BUILD_DIR).make_node('app_resources.pbpack'))
return self(*k, **kw)
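# A minimal usage sketch from a project wscript (paths are assumptions, not
# mandated by the SDK):
#
#   def build(bld):
#       bld.load('pebble_sdk')
#       bld.pbl_program(source=bld.path.ant_glob('src/**/*.c'),
#                       target='{}/pebble-app.elf'.format(bld.env.BUILD_DIR))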
@conf
def pbl_worker(self, *k, **kw):
"""
    This method is bound to the build context and is called by specifying `bld.pbl_worker()`. We
    set the custom features `c`, `cprogram`, `pebble_cprogram` and `memory_usage` to run when this
    method is invoked.
:param self: the BuildContext object
:param k: none expected
:param kw:
source - the source C files to be built and linked
target - the destination binary file for the compiled source
:return: a task generator instance with keyword arguments specified
"""
kw['bin_type'] = 'worker'
kw['features'] = 'c cprogram pebble_cprogram memory_usage'
kw['worker'] = kw['target']
return self(*k, **kw)
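# Companion sketch (assumed project layout): a background worker is declared
# the same way, e.g.
#
#   bld.pbl_worker(source=bld.path.ant_glob('worker_src/**/*.c'),
#                  target='{}/pebble-worker.elf'.format(bld.env.BUILD_DIR))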

View file

@ -0,0 +1,374 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import time
import types
from waflib import Logs
from waflib.Configure import conf
from waflib.Task import Task
from waflib.TaskGen import after_method, before_method, feature
from waflib.Tools import c, c_preproc
import ldscript, process_bundle, process_headers, process_js, report_memory_usage, xcode_pebble
from pebble_sdk_platform import maybe_import_internal
from sdk_helpers import (append_to_attr, find_sdk_component, get_node_from_abspath,
wrap_task_name_with_platform)
# Override the default waf task __str__ method to include display of the HW platform being targeted
Task.__str__ = wrap_task_name_with_platform
def options(opt):
"""
Specify the options available when invoking waf; uses OptParse. This method is called from
app and lib waftools by `opt.load('pebble_sdk_common')`
:param opt: the OptionContext object
:return: N/A
"""
opt.load('gcc')
opt.add_option('-d', '--debug', action='store_true', default=False, dest='debug',
help='Build in debug mode')
opt.add_option('--no-groups', action='store_true', default=False, dest='no_groups')
opt.add_option('--sandboxed-build', action='store_true', default=False, dest='sandbox')
def configure(conf):
"""
Configure the tools for the build by locating SDK prerequisites on the filesystem
:param conf: the ConfigureContext
:return: N/A
"""
if not conf.options.debug:
conf.env.append_value('DEFINES', 'RELEASE')
else:
Logs.pprint("CYAN", "Debug enabled")
if conf.options.no_groups:
conf.env.USE_GROUPS = False
else:
conf.env.USE_GROUPS = True
conf.env.SANDBOX = conf.options.sandbox
conf.env.VERBOSE = conf.options.verbose
conf.env.TIMESTAMP = int(time.time())
# If waf is in ~/pebble-dev/PebbleSDK-X.XX/waf
# Then this file is in ~/pebble-dev/PebbleSDK-X.XX/.waflib-xxxx/waflib/extras/
# => we need to go up 3 directories to find the folder containing waf
pebble_sdk = conf.root.find_dir(os.path.dirname(__file__)).parent.parent.parent
if pebble_sdk is None:
conf.fatal("Unable to find Pebble SDK!\n"
"Please make sure you are running waf directly from your SDK.")
conf.env.PEBBLE_SDK_ROOT = pebble_sdk.abspath()
# Set location of Pebble SDK common folder
pebble_sdk_common = pebble_sdk.find_node('common')
conf.env.PEBBLE_SDK_COMMON = pebble_sdk_common.abspath()
if 'NODE_PATH' in os.environ:
conf.env.NODE_PATH = conf.root.find_node(os.environ['NODE_PATH']).abspath()
webpack_path = conf.root.find_node(conf.env.NODE_PATH).find_node('.bin').abspath()
try:
conf.find_program('webpack', path_list=[webpack_path])
except conf.errors.ConfigurationError:
pass # Error will be caught after checking for enableMultiJS setting
else:
Logs.pprint('YELLOW', "WARNING: Unable to find $NODE_PATH variable required for SDK "
"build. Please verify this build was initiated with a recent "
"pebble-tool.")
maybe_import_internal(conf.env)
def build(bld):
"""
This method is invoked from the app or lib waftool with the `bld.load('pebble_sdk_common')`
call and sets up additional task generators for the SDK.
:param bld: the BuildContext object
:return: N/A
"""
    # bld.env is set to a shallow copy of the default ConfigSet stored in bld.all_envs['']
    bld.env = bld.all_envs['']
    bld.load('file_name_c_define')
    # Process message keys
    bld(features='message_keys')
    # cached_env is set to a shallow copy of the current ConfigSet for this BuildContext
    cached_env = bld.env
for platform in bld.env.TARGET_PLATFORMS:
# bld.env is set to a shallow copy of the ConfigSet labeled <platform>
bld.env = bld.all_envs[platform]
# Create a build group (set of TaskGens) for <platform>
if bld.env.USE_GROUPS:
bld.add_group(bld.env.PLATFORM_NAME)
# Generate a linker script specific to the current platform
build_node = bld.path.get_bld().find_or_declare(bld.env.BUILD_DIR)
bld(features='subst',
source=find_sdk_component(bld, bld.env, 'pebble_app.ld.template'),
target=build_node.make_node('pebble_app.ld.auto'),
**bld.env.PLATFORM)
# Locate Rocky JS tooling script
js_tooling_script = find_sdk_component(bld, bld.env, 'tools/generate_snapshot.js')
bld.env.JS_TOOLING_SCRIPT = js_tooling_script if js_tooling_script else None
# bld.env is set back to a shallow copy of the original ConfigSet that was set when this
# `build` method was invoked
bld.env = cached_env
# Create a build group for bundling (should run after the build groups for each platform)
if bld.env.USE_GROUPS:
bld.add_group('bundle')
def _wrap_c_preproc_scan(task):
"""
This function is a scanner function that wraps c_preproc.scan to fix up pebble.h dependencies.
    pebble.h is outside of the bld/src trees, so it's not considered a valid dependency
    and isn't scanned for further dependencies. Normally this would be fine, but pebble.h includes
an auto-generated resource id header which is really a dependency. We detect this include and
add the resource id header file to the nodes being scanned by c_preproc.
:param task: the task instance
:return: N/A
"""
(nodes, names) = c_preproc.scan(task)
if 'pebble.h' in names:
nodes.append(get_node_from_abspath(task.generator.bld, task.env.RESOURCE_ID_HEADER))
nodes.append(get_node_from_abspath(task.generator.bld, task.env.MESSAGE_KEYS_HEADER))
return nodes, names
@feature('c')
@before_method('process_source')
def setup_pebble_c(task_gen):
"""
This method is called before all of the c aliases (objects, shlib, stlib, program, etc) and
ensures that the SDK `include` path for the current platform, as well as the project root
directory and the project src directory are included as header search paths (includes) for the
build.
:param task_gen: the task generator instance
:return: N/A
"""
platform = task_gen.env.PLATFORM_NAME
append_to_attr(task_gen, 'includes',
[find_sdk_component(task_gen.bld, task_gen.env, 'include'),
'.', 'include', 'src'])
append_to_attr(task_gen, 'includes', platform)
for lib in task_gen.bld.env.LIB_JSON:
if 'pebble' in lib:
lib_include_node = task_gen.bld.path.find_node(lib['path']).find_node('include')
append_to_attr(task_gen, 'includes',
[lib_include_node,
lib_include_node.find_node(str(lib['name'])).find_node(platform)])
@feature('c')
@after_method('process_source')
def fix_pebble_h_dependencies(task_gen):
"""
    This method is called after the source files are processed for task generators with the `c`
    feature and ensures that the _wrap_c_preproc_scan method is run for all c tasks.
:param task_gen: the task generator instance
:return: N/A
"""
for task in task_gen.tasks:
if type(task) == c.c:
# Swap out the bound member function for our own
task.scan = types.MethodType(_wrap_c_preproc_scan, task, c.c)
@feature('pebble_cprogram')
@before_method('process_source')
def setup_pebble_cprogram(task_gen):
"""
This method is called before all of the c aliases (objects, shlib, stlib, program, etc) and
adds the appinfo.auto.c file to the source file list, adds the SDK pebble library to the lib
path for the build, sets the linkflags for the build, and specifies the linker script to
use during the linking step.
:param task_gen: the task generator instance
:return: None
"""
build_node = task_gen.path.get_bld().make_node(task_gen.env.BUILD_DIR)
platform = task_gen.env.PLATFORM_NAME
if not hasattr(task_gen, 'bin_type') or getattr(task_gen, 'bin_type') != 'lib':
append_to_attr(task_gen, 'source', build_node.make_node('appinfo.auto.c'))
append_to_attr(task_gen, 'source', build_node.make_node('src/resource_ids.auto.c'))
if task_gen.env.MESSAGE_KEYS:
append_to_attr(task_gen,
'source',
get_node_from_abspath(task_gen.bld,
task_gen.env.MESSAGE_KEYS_DEFINITION))
append_to_attr(task_gen, 'stlibpath',
find_sdk_component(task_gen.bld, task_gen.env, 'lib').abspath())
append_to_attr(task_gen, 'stlib', 'pebble')
for lib in task_gen.bld.env.LIB_JSON:
# Skip binary check for non-Pebble libs
if not 'pebble' in lib:
continue
binaries_path = task_gen.bld.path.find_node(lib['path']).find_node('binaries')
if binaries_path:
# Check for existence of platform folders inside binaries folder
platform_binary_path = binaries_path.find_node(platform)
if not platform_binary_path:
task_gen.bld.fatal("Library {} is missing the {} platform folder in {}".
format(lib['name'], platform, binaries_path))
# Check for existence of binary for each platform
if lib['name'].startswith('@'):
scoped_name = lib['name'].rsplit('/', 1)
lib_binary = (platform_binary_path.find_node(str(scoped_name[0])).
find_node("lib{}.a".format(scoped_name[1])))
else:
lib_binary = platform_binary_path.find_node("lib{}.a".format(lib['name']))
if not lib_binary:
task_gen.bld.fatal("Library {} is missing a binary for the {} platform".
format(lib['name'], platform))
# Link library binary (supports scoped names)
if lib['name'].startswith('@'):
append_to_attr(task_gen, 'stlibpath',
platform_binary_path.find_node(str(scoped_name[0])).abspath())
append_to_attr(task_gen, 'stlib', scoped_name[1])
else:
append_to_attr(task_gen, 'stlibpath', platform_binary_path.abspath())
append_to_attr(task_gen, 'stlib', lib['name'])
append_to_attr(task_gen, 'linkflags',
['-Wl,--build-id=sha1',
'-Wl,-Map,pebble-{}.map,--emit-relocs'.format(getattr(task_gen,
'bin_type',
'app'))])
if not hasattr(task_gen, 'ldscript'):
task_gen.ldscript = (
build_node.find_or_declare('pebble_app.ld.auto').path_from(task_gen.path))
def _get_entry_point(ctx, js_type, waf_js_entry_point):
"""
Returns the appropriate JS entry point, extracted from a project's package.json file,
wscript or common SDK default
:param ctx: the BuildContext
:param js_type: type of JS build, pkjs or rockyjs
:param waf_js_entry_point: the JS entry point specified by waftools
:return: the JS entry point for the bundled JS file
"""
fallback_entry_point = waf_js_entry_point
if not fallback_entry_point:
if js_type == 'pkjs':
if ctx.path.find_node('src/pkjs/index.js'):
fallback_entry_point = 'src/pkjs/index.js'
else:
fallback_entry_point = 'src/js/app.js'
if js_type == 'rockyjs':
fallback_entry_point = 'src/rocky/index.js'
project_info = ctx.env.PROJECT_INFO
if not project_info.get('main'):
return fallback_entry_point
if project_info['main'].get(js_type):
return str(project_info['main'][js_type])
return fallback_entry_point
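# Illustrative package.json "main" object (an assumption about one possible
# project layout) showing both overridable entry points consulted above:
#
#   "main": {
#       "pkjs": "src/pkjs/index.js",
#       "rockyjs": "src/rocky/index.js"
#   }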
@conf
def pbl_bundle(self, *k, **kw):
"""
    This method is bound to the build context and is called by specifying `bld.pbl_bundle()`. We
    set the custom features `js` and `bundle` to run when this method is invoked.
:param self: the BuildContext object
:param k: none expected
:param kw:
binaries - a list containing dictionaries specifying the HW platform targeted by the
binary built, the app binary, and an optional worker binary
js - the source JS files to be bundled
js_entry_file - an optional parameter to specify the entry JS file when
enableMultiJS is set to 'true'
:return: a task generator instance with keyword arguments specified
"""
if kw.get('bin_type', 'app') == 'lib':
kw['features'] = 'headers js package'
else:
if self.env.BUILD_TYPE == 'rocky':
kw['js_entry_file'] = _get_entry_point(self, 'pkjs', kw.get('js_entry_file'))
kw['features'] = 'js bundle'
return self(*k, **kw)
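# Usage sketch from a project wscript (platform name and paths assumed):
#
#   bld.pbl_bundle(binaries=[{'platform': 'basalt',
#                             'app_elf': 'basalt/pebble-app.elf'}],
#                  js=bld.path.ant_glob('src/pkjs/**/*.js'),
#                  js_entry_file='src/pkjs/index.js')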
@conf
def pbl_build(self, *k, **kw):
"""
    This method is bound to the build context and is called by specifying `bld.pbl_build()`. We
    set the custom features appropriate to the requested `bin_type` (`c`, `cprogram` or `cstlib`,
    `pebble_cprogram` and `memory_usage`) to run when this method is invoked. This method is
    intended to someday replace `pbl_program` and `pbl_worker` so that all apps, workers, and
    libs will run through this method.
:param self: the BuildContext object
:param k: none expected
:param kw:
source - the source C files to be built and linked
target - the destination binary file for the compiled source
:return: a task generator instance with keyword arguments specified
"""
valid_bin_types = ('app', 'worker', 'lib', 'rocky')
bin_type = kw.get('bin_type', None)
if bin_type not in valid_bin_types:
self.fatal("The pbl_build method requires that a valid bin_type attribute be specified. "
"Valid options are {}".format(valid_bin_types))
if bin_type == 'rocky':
kw['features'] = 'c cprogram pebble_cprogram memory_usage'
elif bin_type in ('app', 'worker'):
kw['features'] = 'c cprogram pebble_cprogram memory_usage'
kw[bin_type] = kw['target']
elif bin_type == 'lib':
kw['features'] = 'c cstlib memory_usage'
path, name = kw['target'].rsplit('/', 1)
kw['lib'] = self.path.find_or_declare(path).make_node("lib{}.a".format(name))
# Pass values needed for memory usage report
if bin_type != 'worker':
kw['resources'] = (
self.env.PROJECT_RESBALL if bin_type == 'lib' else
self.path.find_or_declare(self.env.BUILD_DIR).make_node('app_resources.pbpack'))
return self(*k, **kw)
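# Usage sketch (values assumed): pbl_build() is the bin_type-driven entry point
# that subsumes pbl_program() and pbl_worker(), e.g.
#
#   bld.pbl_build(source=bld.path.ant_glob('src/c/**/*.c'),
#                 target='{}/pebble-app.elf'.format(bld.env.BUILD_DIR),
#                 bin_type='app')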
@conf
def pbl_js_build(self, *k, **kw):
"""
    This method is bound to the build context and is called by specifying `bld.pbl_js_build()`.
When this method is invoked, we set the custom feature `rockyjs` to run, which handles
processing of JS files in preparation for Rocky.js bytecode compilation (this actually
happens during resource generation)
:param self: the BuildContext object
:param k: none expected
:param kw:
source - the source JS files that will eventually be compiled into bytecode
target - the destination JS file that will be specified as the source file for the
bytecode compilation process
:return: a task generator instance with keyword arguments specified
"""
kw['js_entry_file'] = _get_entry_point(self, 'rockyjs', kw.get('js_entry_file'))
kw['features'] = 'rockyjs'
return self(*k, **kw)

View file

@ -0,0 +1,123 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import sdk_paths
from process_sdk_resources import generate_resources
from sdk_helpers import (configure_libraries, configure_platform, get_target_platforms,
validate_message_keys_object)
def options(opt):
"""
Specify the options available when invoking waf; uses OptParse
:param opt: the OptionContext object
:return: N/A
"""
opt.load('pebble_sdk_common')
opt.add_option('-t', '--timestamp', dest='timestamp',
                   help="Use a specific timestamp to label this package (e.g., your repository's "
                        "last commit time); defaults to the time of build")
def configure(conf):
"""
Configure the build using information obtained from the package.json file
:param conf: the ConfigureContext object
:return: N/A
"""
conf.load('pebble_sdk_common')
# This overrides the default config in pebble_sdk_common.py
if conf.options.timestamp:
conf.env.TIMESTAMP = conf.options.timestamp
conf.env.BUNDLE_NAME = "dist.zip"
package_json_node = conf.path.get_src().find_node('package.json')
if package_json_node is None:
conf.fatal('Could not find package.json')
with open(package_json_node.abspath(), 'r') as f:
package_json = json.load(f)
# Extract project info from "pebble" object in package.json
project_info = package_json['pebble']
project_info['name'] = package_json['name']
validate_message_keys_object(conf, project_info, 'package.json')
conf.env.PROJECT_INFO = project_info
conf.env.BUILD_TYPE = 'lib'
conf.env.REQUESTED_PLATFORMS = project_info.get('targetPlatforms', [])
conf.env.LIB_DIR = "node_modules"
get_target_platforms(conf)
# With new-style projects, check for libraries specified in package.json
if 'dependencies' in package_json:
configure_libraries(conf, package_json['dependencies'])
conf.load('process_message_keys')
if 'resources' in project_info and 'media' in project_info['resources']:
conf.env.RESOURCES_JSON = package_json['pebble']['resources']['media']
# base_env is set to a shallow copy of the current ConfigSet for this ConfigureContext
base_env = conf.env
for platform in conf.env.TARGET_PLATFORMS:
# Create a deep copy of the `base_env` ConfigSet and set conf.env to a shallow copy of
# the resultant ConfigSet
conf.setenv(platform, base_env)
configure_platform(conf, platform)
# conf.env is set back to a shallow copy of the default ConfigSet stored in conf.all_envs['']
conf.setenv('')
def build(bld):
"""
    This method is invoked from a project's wscript with the `ctx.load('pebble_sdk_lib')` call
and sets up all of the task generators for the SDK. After all of the build methods have run,
the configured task generators will run, generating build tasks and managing dependencies. See
https://waf.io/book/#_task_generators for more details on task generator setup.
:param bld: the BuildContext object
:return: N/A
"""
bld.load('pebble_sdk_common')
# cached_env is set to a shallow copy of the current ConfigSet for this BuildContext
cached_env = bld.env
for platform in bld.env.TARGET_PLATFORMS:
# bld.env is set to a shallow copy of the ConfigSet labeled <platform>
bld.env = bld.all_envs[platform]
# Set the build group (set of TaskGens) to the group labeled <platform>
if bld.env.USE_GROUPS:
bld.set_group(bld.env.PLATFORM_NAME)
# Generate resources specific to the current platform
resource_path = None
if bld.env.RESOURCES_JSON:
try:
resource_path = bld.path.find_node('src').find_node('resources').path_from(bld.path)
except AttributeError:
bld.fatal("Unable to locate resources at src/resources/")
generate_resources(bld, resource_path)
# bld.env is set back to a shallow copy of the original ConfigSet that was set when this `build`
# method was invoked
bld.env = cached_env

View file

@ -0,0 +1,209 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from waflib import Task
from waflib.TaskGen import feature
import mkbundle
from pebble_package import LibraryPackage
from process_elf import generate_bin_file
from resources.types.resource_ball import ResourceBall
@Task.update_outputs
class lib_package(Task.Task):
"""
Task class to generate a library bundle for distribution
"""
def run(self):
"""
This method executes when the package task runs
:return: N/A
"""
bld = self.generator.bld
build_dir = bld.bldnode
includes = {include.path_from(build_dir.find_node('include')): include.abspath()
for include in getattr(self, 'includes', [])}
binaries = {binary.path_from(build_dir): binary.abspath()
for binary in getattr(self, 'binaries', [])}
js = {js.path_from(build_dir.find_node('js')): js.abspath()
for js in getattr(self, 'js', [])}
resource_definitions = []
for resball in getattr(self, 'resources', []):
resource_definitions.extend(ResourceBall.load(resball.abspath()).get_all_declarations())
reso_list = []
for definition in resource_definitions:
if definition.target_platforms:
platforms = list(set(definition.target_platforms) & set(bld.env.TARGET_PLATFORMS))
else:
platforms = bld.env.TARGET_PLATFORMS
for platform in platforms:
platform_path = build_dir.find_node(bld.all_envs[platform].BUILD_DIR).relpath()
reso_list.append(build_dir.find_node("{}.{}.reso".format(
os.path.join(platform_path,
bld.path.find_node(definition.sources[0]).relpath()),
str(definition.name)
)))
resources = {
os.path.join(resource.path_from(build_dir).split('/', 1)[0],
resource.path_from(build_dir).split('/', 3)[3]): resource.abspath()
for resource in reso_list}
package = LibraryPackage(self.outputs[0].abspath())
package.add_files(includes=includes, binaries=binaries, resources=resources, js=js)
package.pack()
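# Note (a reading of the code above, not a spec): the archive written to
# BUNDLE_NAME collects headers, per-platform lib<name>.a binaries, .reso
# resources and bundled JS, each keyed by the relative path computed in run();
# LibraryPackage.add_files() lays the files out under those keys.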
@Task.update_outputs
class app_bundle(Task.Task):
"""
Task class to generate an app bundle for distribution
"""
def run(self):
"""
This method executes when the bundle task runs
:return: N/A
"""
binaries = getattr(self, 'bin_files')
js_files = getattr(self, 'js_files')
outfile = self.outputs[0].abspath()
mkbundle.make_watchapp_bundle(
timestamp=self.generator.bld.env.TIMESTAMP,
appinfo=self.generator.bld.path.get_bld().find_node('appinfo.json').abspath(),
binaries=binaries,
js=[js_file.abspath() for js_file in js_files],
outfile=outfile
)
@feature('package')
def make_lib_bundle(task_gen):
"""
Bundle the build artifacts into a distributable library package.
Keyword arguments:
js -- A list of javascript files to package into the resulting bundle
includes -- A list of header files to package into library bundle
:param task_gen: the task generator instance
:return: None
"""
js = task_gen.to_nodes(getattr(task_gen, 'js', []))
includes = task_gen.to_nodes(getattr(task_gen, 'includes', []))
resources = []
binaries = []
for platform in task_gen.bld.env.TARGET_PLATFORMS:
bld_dir = task_gen.path.get_bld().find_or_declare(platform)
env = task_gen.bld.all_envs[platform]
resources.append(getattr(env, 'PROJECT_RESBALL'))
project_name = env.PROJECT_INFO['name']
if project_name.startswith('@'):
scoped_name = project_name.rsplit('/', 1)
binaries.append(
bld_dir.find_or_declare(str(scoped_name[0])).
find_or_declare("lib{}.a".format(scoped_name[1])))
else:
binaries.append(bld_dir.find_or_declare("lib{}.a".format(project_name)))
task = task_gen.create_task('lib_package',
[],
task_gen.bld.path.make_node(task_gen.bld.env.BUNDLE_NAME))
task.js = js
task.includes = includes
task.resources = resources
task.binaries = binaries
task.dep_nodes = js + includes + resources + binaries
# PBL-40925 Use pebble_package.py instead of mkbundle.py
@feature('bundle')
def make_pbl_bundle(task_gen):
"""
Bundle the build artifacts into a distributable package.
Keyword arguments:
js -- A list of javascript files to package into the resulting bundle
binaries -- A list of the binaries for each platform to include in the bundle
:param task_gen: the task generator instance
:return: None
"""
bin_files = []
bundle_sources = []
js_files = getattr(task_gen, 'js', [])
has_pkjs = bool(getattr(task_gen, 'js', False))
if has_pkjs:
bundle_sources.extend(task_gen.to_nodes(task_gen.js))
cached_env = task_gen.bld.env
if hasattr(task_gen, 'bin_type') and task_gen.bin_type == 'rocky':
binaries = []
for platform in task_gen.bld.env.TARGET_PLATFORMS:
binaries.append({"platform": platform,
"app_elf": "{}/pebble-app.elf".format(
task_gen.bld.all_envs[platform].BUILD_DIR)})
rocky_source_node = task_gen.bld.path.get_bld().make_node('resources/rocky-app.js')
js_files.append(rocky_source_node)
bundle_sources.append(rocky_source_node)
else:
binaries = task_gen.binaries
for binary in binaries:
task_gen.bld.env = task_gen.bld.all_envs[binary['platform']]
platform_build_node = task_gen.bld.path.find_or_declare(task_gen.bld.env.BUILD_DIR)
app_elf_file = task_gen.bld.path.get_bld().make_node(binary['app_elf'])
if app_elf_file is None:
raise Exception("Must specify elf argument to bundle")
worker_bin_file = None
if 'worker_elf' in binary:
worker_elf_file = task_gen.bld.path.get_bld().make_node(binary['worker_elf'])
app_bin_file = generate_bin_file(task_gen, 'app', app_elf_file, has_pkjs,
has_worker=True)
worker_bin_file = generate_bin_file(task_gen, 'worker', worker_elf_file, has_pkjs,
has_worker=True)
bundle_sources.append(worker_bin_file)
else:
app_bin_file = generate_bin_file(task_gen, 'app', app_elf_file, has_pkjs,
has_worker=False)
resources_pack = platform_build_node.make_node('app_resources.pbpack')
bundle_sources.extend([app_bin_file, resources_pack])
bin_files.append({'watchapp': app_bin_file.abspath(),
'resources': resources_pack.abspath(),
'worker_bin': worker_bin_file.abspath() if worker_bin_file else None,
'sdk_version': {'major': task_gen.bld.env.SDK_VERSION_MAJOR,
'minor': task_gen.bld.env.SDK_VERSION_MINOR},
'subfolder': task_gen.bld.env.BUNDLE_BIN_DIR})
task_gen.bld.env = cached_env
bundle_output = task_gen.bld.path.get_bld().make_node(task_gen.bld.env.BUNDLE_NAME)
task = task_gen.create_task('app_bundle', [], bundle_output)
task.bin_files = bin_files
task.js_files = js_files
task.dep_nodes = bundle_sources

View file

@ -0,0 +1,50 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import objcopy
import pebble_sdk_gcc
# TODO: PBL-33841 Make this a feature
def generate_bin_file(task_gen, bin_type, elf_file, has_pkjs, has_worker):
"""
Generate bin file by injecting metadata from elf file and resources file
:param task_gen: the task generator instance
:param bin_type: the type of binary being built (app, worker, lib)
:param elf_file: the path to the compiled elf file
:param has_pkjs: boolean for whether the build contains PebbleKit JS files
:param has_worker: boolean for whether the build contains a worker binary
:return: the modified binary file with injected metadata
"""
platform_build_node = task_gen.bld.path.get_bld().find_node(task_gen.bld.env.BUILD_DIR)
packaged_files = [elf_file]
resources_file = None
if bin_type != 'worker':
resources_file = platform_build_node.find_or_declare('app_resources.pbpack')
packaged_files.append(resources_file)
raw_bin_file = platform_build_node.make_node('pebble-{}.raw.bin'.format(bin_type))
bin_file = platform_build_node.make_node('pebble-{}.bin'.format(bin_type))
task_gen.bld(rule=objcopy.objcopy_bin, source=elf_file, target=raw_bin_file)
pebble_sdk_gcc.gen_inject_metadata_rule(task_gen.bld,
src_bin_file=raw_bin_file,
dst_bin_file=bin_file,
elf_file=elf_file,
resource_file=resources_file,
timestamp=task_gen.bld.env.TIMESTAMP,
has_pkjs=has_pkjs,
has_worker=has_worker)
return bin_file
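# Illustrative call sketch (node names assumed): given a compiled ELF node,
#
#   bin_file = generate_bin_file(task_gen, 'app', app_elf_node,
#                                has_pkjs=True, has_worker=False)
#
# produces pebble-app.bin with metadata injected from the ELF and, for
# non-worker binaries, the app_resources.pbpack resource pack.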

View file

@ -0,0 +1,83 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from waflib.TaskGen import before_method, feature
from waflib import Context, Task
from sdk_helpers import get_node_from_abspath
@feature('headers')
@before_method('make_lib_bundle')
def process_headers(task_gen):
"""
Process all of the headers specified in the wscript file, as well as the headers generated
during the build process for the resource ids and message keys, as needed.
Keyword arguments:
includes -- A list of header files to copy
:param task_gen: the task generator instance
:return: None
"""
header_nodes = task_gen.to_nodes(task_gen.includes)
for platform in task_gen.env.TARGET_PLATFORMS:
env = task_gen.bld.all_envs[platform]
header_nodes.append(get_node_from_abspath(task_gen.bld, env['RESOURCE_ID_HEADER']))
# Add .h file containing app message keys
if 'MESSAGE_KEYS_HEADER' in dict(task_gen.env):
header_nodes.append(
get_node_from_abspath(task_gen.bld, task_gen.env['MESSAGE_KEYS_HEADER']))
# Copy header files to build/include/<libname> to provide naming collision protection in
# #includes
lib_name = str(task_gen.env.PROJECT_INFO['name'])
lib_include_node = task_gen.bld.path.get_bld().make_node('include').make_node(lib_name)
target_nodes = []
for header in header_nodes:
base_node = (task_gen.bld.path.get_bld() if header.is_child_of(task_gen.bld.path.get_bld())
else task_gen.bld.path)
if header.is_child_of(base_node.find_node('include')):
header_path = header.path_from(base_node.find_node('include'))
else:
header_path = header.path_from(base_node)
target_node = lib_include_node.make_node(header_path)
target_node.parent.mkdir()
target_nodes.append(target_node)
task_gen.includes = target_nodes
task_gen.create_task('copy_headers', src=header_nodes, tgt=target_nodes)
@Task.update_outputs
class copy_headers(Task.Task):
"""
Task class to copy specified headers from a source location to a target location
"""
def run(self):
"""
This method executes when the copy headers task runs
:return: N/A
"""
bld = self.generator.bld
if len(self.inputs) != len(self.outputs):
bld.fatal("Number of input headers ({}) does not match number of target headers ({})".
format(len(self.inputs), len(self.outputs)))
for i in range(len(self.inputs)):
bld.cmd_and_log('cp "{src}" "{tgt}"'.
format(src=self.inputs[i].abspath(), tgt=self.outputs[i].abspath()),
quiet=Context.BOTH)

266
sdk/waftools/process_js.py Normal file
View file

@ -0,0 +1,266 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import subprocess
from string import Template
from waflib.Errors import WafError
from waflib.TaskGen import before_method, feature
from waflib import Context, Logs, Node, Task
from sdk_helpers import find_sdk_component, get_node_from_abspath
from sdk_helpers import process_package
@feature('rockyjs')
@before_method('process_sdk_resources')
def process_rocky_js(task_gen):
"""
Lint the JS source files using a Rocky-specific linter
Keyword arguments:
js -- a list of JS files to process for the build
:param task_gen: the task generator instance
:return: N/A
"""
bld = task_gen.bld
task_gen.mappings = {'': (lambda task_gen, node: None)}
js_nodes = task_gen.to_nodes(task_gen.source)
target = task_gen.to_nodes(task_gen.target)
if not js_nodes:
task_gen.bld.fatal("Project does not contain any source code.")
js_nodes.append(find_sdk_component(bld, task_gen.env, 'include/rocky.js'))
# This locates the available node_modules folders and performs a search for the rocky-lint
# module. This code remains in this file un-abstracted because similar functionality is not yet
# needed elsewhere.
node_modules = []
rocky_linter = None
if bld.path.find_node('node_modules'):
node_modules.append(bld.path.find_node('node_modules'))
if bld.env.NODE_PATH:
node_modules.append(bld.root.find_node(bld.env.NODE_PATH))
for node_modules_node in node_modules:
rocky_linter = node_modules_node.ant_glob('rocky-lint/**/rocky-lint.js')
if rocky_linter:
rocky_linter = rocky_linter[0]
break
rocky_definitions = find_sdk_component(bld, task_gen.env, 'tools/rocky-lint/rocky.d.ts')
if rocky_linter and rocky_definitions:
lintable_nodes = [node for node in js_nodes if node.is_child_of(bld.path)]
lint_task = task_gen.create_task('lint_js', src=lintable_nodes)
lint_task.linter = [task_gen.env.NODE,
rocky_linter.path_from(bld.path),
'-d',
rocky_definitions.path_from(bld.path)]
else:
Logs.pprint('YELLOW', "Rocky JS linter not present - skipping lint task")
# Create JS merge task for Rocky.js files
merge_task = task_gen.create_task('merge_js', src=js_nodes, tgt=target)
merge_task.js_entry_file = task_gen.js_entry_file
merge_task.js_build_type = 'rocky'
@feature('js')
@before_method('make_pbl_bundle', 'make_lib_bundle')
def process_js(task_gen):
"""
Merge the JS source files into a single JS file if enableMultiJS is set to 'true', otherwise,
skip JS processing
Keyword arguments:
js -- A list of JS files to process for the build
:param task_gen: the task generator instance
:return: N/A
"""
# Skip JS handling if there are no JS files
js_nodes = task_gen.to_nodes(getattr(task_gen, 'js', []))
if not js_nodes:
return
# Create JS merge task if the project specifies "enableMultiJS: true"
if task_gen.env.PROJECT_INFO.get('enableMultiJS', False):
target_js = task_gen.bld.path.get_bld().make_node('pebble-js-app.js')
target_js_map = target_js.change_ext('.js.map')
task_gen.js = [target_js, target_js_map]
merge_task = task_gen.create_task('merge_js', src=js_nodes, tgt=[target_js, target_js_map])
merge_task.js_entry_file = task_gen.js_entry_file
merge_task.js_build_type = 'pkjs'
merge_task.js_source_map_config = {
'sourceMapFilename': target_js_map.name
}
return
# Check for pebble-js-app.js if developer does not specify "enableMultiJS: true" in
# the project
if task_gen.env.BUILD_TYPE != 'lib':
for node in js_nodes:
if 'pebble-js-app.js' in node.abspath():
break
else:
Logs.pprint("CYAN",
"WARNING: enableMultiJS is not enabled for this project and "
"pebble-js-app.js does not exist")
# For apps without multiJS enabled and libs, copy JS files from src folder to build folder,
# skipping any files already in the build folder
js_nodes_to_copy = [js_node for js_node in js_nodes if not js_node.is_bld()]
if not js_nodes_to_copy:
task_gen.js = js_nodes
return
target_nodes = []
for js in js_nodes_to_copy:
if js.is_child_of(task_gen.bld.path.find_node('src')):
js_path = js.path_from(task_gen.bld.path.find_node('src'))
else:
js_path = os.path.abspath(js.path_from(task_gen.bld.path))
target_node = task_gen.bld.path.get_bld().make_node(js_path)
target_node.parent.mkdir()
target_nodes.append(target_node)
task_gen.js = target_nodes + list(set(js_nodes) - set(js_nodes_to_copy))
task_gen.create_task('copy_js', src=js_nodes_to_copy, tgt=target_nodes)
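# Illustrative package.json fragment (an example, not a default): declaring
#
#   "pebble": {"enableMultiJS": true}
#
# sends a project down the merge_js path above, bundling the JS entry file and
# everything it require()s into a single pebble-js-app.js in the build tree.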
class copy_js(Task.Task):
"""
Task class for copying source JS files to a target location
"""
def run(self):
"""
This method executes when the JS copy task runs
:return: N/A
"""
bld = self.generator.bld
if len(self.inputs) != len(self.outputs):
bld.fatal("Number of input JS files ({}) does not match number of target JS files ({})".
format(len(self.inputs), len(self.outputs)))
for i in range(len(self.inputs)):
bld.cmd_and_log('cp "{src}" "{tgt}"'.
format(src=self.inputs[i].abspath(), tgt=self.outputs[i].abspath()),
quiet=Context.BOTH)
class merge_js(Task.Task):
"""
Task class for merging all specified JS files into one `pebble-js-app.js` file
"""
def run(self):
"""
This method executes when the JS merge task runs
:return: N/A
"""
bld = self.generator.bld
js_build_type = getattr(self, 'js_build_type')
# Check for a valid JS entry point among JS files
js_nodes = self.inputs
entry_point = bld.path.find_resource(self.js_entry_file)
if entry_point not in js_nodes:
bld.fatal("\n\nJS entry file '{}' not found in JS source files '{}'. We expect to find "
"a javascript file here that we will execute directly when your app launches."
"\n\nIf you are an advanced user, you can supply the 'js_entry_file' "
"parameter to 'pbl_bundle' in your wscript to change the default entry point."
" Note that doing this will break CloudPebble compatibility.".
format(self.js_entry_file, js_nodes))
target_js = self.outputs[0]
entry = [
entry_point.abspath()
]
if js_build_type == 'pkjs':
# NOTE: The order is critical here.
# _pkjs_shared_additions.js MUST be the first in the `entry` array!
entry.insert(0, "_pkjs_shared_additions.js")
if self.env.BUILD_TYPE == 'rocky':
entry.insert(1, "_pkjs_message_wrapper.js")
common_node = bld.root.find_node(self.generator.env.PEBBLE_SDK_COMMON)
tools_webpack_node = common_node.find_node('tools').find_node('webpack')
webpack_config_template_node = tools_webpack_node.find_node('webpack-config.js.pytemplate')
with open(webpack_config_template_node.abspath()) as f:
webpack_config_template_content = f.read()
search_paths = [
common_node.find_node('include').abspath(),
tools_webpack_node.abspath(),
bld.root.find_node(self.generator.env.NODE_PATH).abspath(),
bld.path.get_bld().make_node('js').abspath()
]
pebble_packages = [str(lib['name']) for lib in bld.env.LIB_JSON if 'pebble' in lib]
aliases = {lib: "{}/dist/js".format(lib) for lib in pebble_packages}
info_json_file = bld.path.find_node('package.json') or bld.path.find_node('appinfo.json')
if info_json_file:
aliases.update({'app_package.json': info_json_file.abspath()})
config_file = (
bld.path.get_bld().make_node("webpack/{}/webpack.config.js".format(js_build_type)))
config_file.parent.mkdir()
with open(config_file.abspath(), 'w') as f:
m = {
'IS_SANDBOX': bool(self.env.SANDBOX),
'ENTRY_FILENAMES': entry,
'OUTPUT_PATH': target_js.parent.path_from(bld.path),
'OUTPUT_FILENAME': target_js.name,
'RESOLVE_ROOTS': search_paths,
'RESOLVE_ALIASES': aliases,
'SOURCE_MAP_CONFIG': getattr(self, 'js_source_map_config', None)
}
f.write(Template(webpack_config_template_content).substitute(
                {k: json.dumps(m[k], separators=(',\n', ': ')) for k in m}))
cmd = (
"'{webpack}' --config {config} --display-modules".
format(webpack=self.generator.env.WEBPACK, config=config_file.path_from(bld.path)))
try:
out = bld.cmd_and_log(cmd, quiet=Context.BOTH, output=Context.STDOUT)
except WafError as e:
bld.fatal("JS bundling failed\n{}\n{}".format(e.stdout, e.stderr))
else:
if self.env.VERBOSE > 0:
Logs.pprint('WHITE', out)
class lint_js(Task.Task):
"""
Task class for linting JS source files with a specified linter script.
"""
def run(self):
"""
This method executes when the JS lint task runs
:return: N/A
"""
self.name = 'lint_js'
js_nodes = self.inputs
for js_node in js_nodes:
cmd = self.linter + [js_node.path_from(self.generator.bld.path)]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
if err:
Logs.pprint('CYAN', "\n========== Lint Results: {} ==========\n".format(js_node))
Logs.pprint('WHITE', "{}\n{}\n".format(out, err))
if proc.returncode != 0:
self.generator.bld.fatal("Project failed linting.")

View file

@ -0,0 +1,229 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from re import findall
from waflib.TaskGen import before_method, feature
from waflib import Logs, Task
from sdk_helpers import get_node_from_abspath
header = (
"""#pragma once
#include <stdint.h>
//
// AUTOGENERATED BY BUILD
// DO NOT MODIFY - CHANGES WILL BE OVERWRITTEN
//
""")
definitions_file = (
"""
#include <stdint.h>
//
// AUTOGENERATED BY BUILD
// DO NOT MODIFY - CHANGES WILL BE OVERWRITTEN
//
""")
def configure(conf):
"""
    Configure the build by collecting all of the project's appKeys, as well as the appKeys of any
    dependencies, and writing them out to a header file, a C definitions file, and a JSON file for
    use in the project
:param conf: the ConfigureContext
:return: N/A
"""
if conf.env.BUILD_TYPE != 'lib':
if not dict(conf.env.PROJECT_INFO).get('enableMultiJS', False):
Logs.pprint("CYAN",
"WARNING: enableMultiJS is not enabled for this project. message_keys.json "
"will not be included in your project unless you add it to your "
"pebble-js-app.js file.")
keys = conf.env.PROJECT_INFO.get('messageKeys', conf.env.PROJECT_INFO.get('appKeys', []))
if conf.env.BUILD_TYPE == 'rocky':
if keys:
conf.fatal("Custom messageKeys are not supported for Rocky.js projects. Please "
"remove any messageKeys listed in your package.json file.")
else:
keys = {
"ControlKeyResetRequest": 1,
"ControlKeyResetComplete": 2,
"ControlKeyChunk": 3,
"ControlKeyUnsupportedError": 4,
}
key_list = []
key_dict = {}
block_message_keys = []
if keys:
if isinstance(keys, list):
key_list = keys
elif isinstance(keys, dict):
if conf.env.BUILD_TYPE == 'lib':
conf.fatal("Libraries can only specify an array of messageKeys; other object types "
"are not supported.")
key_dict = keys
else:
conf.fatal("You have specified an invalid messageKeys object in your project JSON "
"file.")
combined_key_list = key_list + key_dict.keys()
for lib in conf.env.LIB_JSON:
if not 'pebble' in lib or not 'messageKeys' in lib['pebble']:
continue
lib_keys = lib['pebble']['messageKeys']
if isinstance(lib_keys, list):
for key in lib_keys:
if key in combined_key_list:
conf.fatal("The messageKey '{}' has already been used and cannot be re-used by "
"the {} library.".format(key, lib['name']))
combined_key_list.append(key)
key_list.extend(lib_keys)
else:
conf.fatal("'{}' has an invalid messageKeys object. "
"Libraries can only specify an messageKeys array.".format(lib['name']))
if key_list:
next_key = 10000
multi_keys = [key for key in key_list if ']' in key]
single_keys = [key for key in key_list if ']' not in key]
for key in multi_keys:
try:
key_name, num_keys = findall(r"([\w]+)\[(\d+)\]$", key)[0]
except IndexError:
suggested_key_name = key.split('[', 1)[0]
conf.fatal("An invalid message key of `{}` was specified. Verify that a valid "
"length is specified if you are trying to allocate an array of keys "
"with a single identifier. For example, try `{}[2]`.".
format(key, suggested_key_name))
else:
key_dict.update({key_name: next_key})
next_key += int(num_keys)
block_message_keys.append(key_name)
        key_dict.update({name: key_id for key_id, name in enumerate(single_keys, start=next_key)})
conf.env.PROJECT_INFO['messageKeys'] = key_dict
conf.env.PROJECT_INFO['appKeys'] = key_dict # Support legacy appinfo.json generation
conf.env.MESSAGE_KEYS = key_dict
conf.env.BLOCK_MESSAGE_KEYS = block_message_keys
bld_dir = conf.path.get_bld()
conf.env.MESSAGE_KEYS_HEADER = bld_dir.make_node('include/message_keys.auto.h').abspath()
if key_dict:
conf.env.MESSAGE_KEYS_DEFINITION = bld_dir.make_node('src/message_keys.auto.c').abspath()
conf.env.MESSAGE_KEYS_JSON = bld_dir.make_node('js/message_keys.json').abspath()
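# Worked example of the key assignment above (key names assumed): given
#
#   "messageKeys": ["Temperature", "Icons[4]"]
#
# the array key "Icons" receives base key 10000 (reserving 10000-10003) and the
# single key "Temperature" receives 10004, so message_keys.auto.h declares:
#
#   extern uint32_t MESSAGE_KEY_Icons;
#   extern uint32_t MESSAGE_KEY_Temperature;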
@feature('message_keys')
@before_method('cprogram', 'process_js', 'process_headers')
def process_message_keys(task_gen):
"""
    Create the appropriate message key output files for the type of build: a header for a library,
    and a header, a C definitions file, and a JSON file for an app
:param task_gen: the task generator instance
:return: None
"""
message_keys = task_gen.env['MESSAGE_KEYS']
bld = task_gen.bld
# Create a header file that is included during lib/app builds
header_task = (
task_gen.create_task('message_key_header',
tgt=get_node_from_abspath(task_gen.bld,
getattr(task_gen.env,
'MESSAGE_KEYS_HEADER'))))
header_task.message_keys = message_keys
header_task.dep_vars = message_keys
if bld.env.BUILD_TYPE == 'lib' or not message_keys:
return
# Create a C file to satisfy any extern header files
definitions_task = (
task_gen.create_task('message_key_definitions',
tgt=get_node_from_abspath(task_gen.bld,
getattr(task_gen.env,
'MESSAGE_KEYS_DEFINITION'))))
definitions_task.message_keys = message_keys
definitions_task.dep_vars = message_keys
# Create a JSON file for apps to require
bld.path.get_bld().make_node('js').mkdir()
json_task = (
task_gen.create_task('message_key_json',
tgt=get_node_from_abspath(task_gen.bld,
getattr(task_gen.env, 'MESSAGE_KEYS_JSON'))))
json_task.message_keys = message_keys
json_task.dep_vars = message_keys
@Task.update_outputs
class message_key_header(Task.Task):
"""
Task class for creating a header file with the message key definitions for the project
"""
def run(self):
"""
This method executes when the message key header task runs
:return: N/A
"""
self.outputs[0].parent.mkdir()
with open(self.outputs[0].abspath(), 'w') as f:
f.write(header)
for k, v in sorted(self.message_keys.items(), key=lambda x: x[0]):
f.write("extern uint32_t MESSAGE_KEY_{};\n".format(k))
@Task.update_outputs
class message_key_definitions(Task.Task):
"""
Task class for creating a C definitions file with the message key definitions for the project
"""
def run(self):
"""
This method executes when the message key definitions task runs
:return: N/A
"""
self.outputs[0].parent.mkdir()
with open(self.outputs[0].abspath(), 'w') as f:
f.write(definitions_file)
for k, v in sorted(self.message_keys.items(), key=lambda x: x[0]):
f.write("uint32_t MESSAGE_KEY_{} = {};\n".format(k, v))
@Task.update_outputs
class message_key_json(Task.Task):
"""
Task class for creating a JSON file with the message key definitions for the project
"""
def run(self):
"""
        This method executes when the message key JSON task runs
:return: N/A
"""
self.outputs[0].parent.mkdir()
with open(self.outputs[0].abspath(), 'w') as f:
json.dump(self.message_keys, f, sort_keys=True, indent=4, separators=(',', ': '))
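# Continuing the worked example above, js/message_keys.json would contain
# {"Icons": 10000, "Temperature": 10004}; a PebbleKit JS sketch that consumes
# it might read:
#
#   var keys = require('message_keys');
#   var dict = {};
#   dict[keys.Temperature] = 72;
#   Pebble.sendAppMessage(dict);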

View file

@ -0,0 +1,231 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
from waflib import Node
from resources.find_resource_filename import find_most_specific_filename
from resources.types.resource_definition import ResourceDefinition
from resources.types.resource_object import ResourceObject
from resources.resource_map import resource_generator
import resources.resource_map.resource_generator_bitmap
import resources.resource_map.resource_generator_font
import resources.resource_map.resource_generator_js
import resources.resource_map.resource_generator_pbi
import resources.resource_map.resource_generator_png
import resources.resource_map.resource_generator_raw
from sdk_helpers import is_sdk_2x, validate_resource_not_larger_than
def _preprocess_resource_ids(bld, resources_list, has_published_media=False):
"""
This method reads all of the defined resources for the project and assigns resource IDs to
them prior to the start of resource processing. This preprocessing step is necessary in order
    for the timeline lookup table to contain accurate resource IDs, while still allowing us to
    prepend the TLUT as a resource in the resource ball.
:param bld: the BuildContext object
:param resources_list: the list of resources defined for this project
:param has_published_media: boolean for whether publishedMedia exists for the project
:return: None
"""
resource_id_mapping = {}
next_id = 1
if has_published_media:
# The timeline lookup table must be the first resource if one exists
resource_id_mapping['TIMELINE_LUT'] = next_id
next_id += 1
for res_id, res in enumerate(resources_list, start=next_id):
if isinstance(res, Node.Node):
if res.name == 'timeline_resource_table.reso':
continue
res_name = ResourceObject.load(res.abspath()).definition.name
resource_id_mapping[res_name] = res_id
else:
resource_id_mapping[res.name] = res_id
bld.env.RESOURCE_ID_MAPPING = resource_id_mapping
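# Worked example (resource names assumed): for a project defining resources
# IMAGE_A and IMAGE_B alongside publishedMedia, the mapping computed above is
#
#   {'TIMELINE_LUT': 1, 'IMAGE_A': 2, 'IMAGE_B': 3}
#
# i.e. the timeline lookup table always claims resource ID 1 when present.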
def generate_resources(bld, resource_source_path):
"""
This method creates all of the task generators necessary to handle every possible resource
allowed by the SDK.
:param bld: the BuildContext object
:param resource_source_path: the path from which to retrieve resource files
:return: N/A
"""
resources_json = getattr(bld.env, 'RESOURCES_JSON', [])
published_media_json = getattr(bld.env, 'PUBLISHED_MEDIA_JSON', [])
if resource_source_path:
resources_node = bld.path.find_node(resource_source_path)
else:
resources_node = bld.path.find_node('resources')
resource_file_mapping = {}
for resource in resources_json:
resource_file_mapping[resource['name']] = (
find_most_specific_filename(bld, bld.env, resources_node, resource['file']))
# Load the waftools that handle creating resource objects, a resource pack and the resource
# ID header
bld.load('generate_pbpack generate_resource_ball generate_resource_id_header')
bld.load('process_timeline_resources')
# Iterate over the resource definitions and do some processing to remove resources that
# aren't relevant to the platform we're building for and to apply various backwards
# compatibility adjustments
resource_definitions = []
max_menu_icon_dimensions = (25, 25)
for r in resources_json:
if 'menuIcon' in r and r['menuIcon']:
res_file = (
resources_node.find_node(find_most_specific_filename(bld, bld.env,
resources_node,
str(r['file'])))).abspath()
if not validate_resource_not_larger_than(bld, res_file,
dimensions=max_menu_icon_dimensions):
bld.fatal("menuIcon resource '{}' exceeds the maximum allowed dimensions of {}".
format(r['name'], max_menu_icon_dimensions))
defs = resource_generator.definitions_from_dict(bld, r, resource_source_path)
for d in defs:
if not d.is_in_target_platform(bld):
continue
if d.type == 'png-trans':
                # Hack for backwards SDK compatibility:
# One entry in the media list with the type png-trans actually represents two
# resources, one for the black mask and one for the white mask. They each have
# their own resource ids, so we need two entries in our definitions list.
for suffix in ('WHITE', 'BLACK'):
new_definition = copy.deepcopy(d)
new_definition.name = '%s_%s' % (d.name, suffix)
resource_definitions.append(new_definition)
continue
if d.type == 'png' and is_sdk_2x(bld.env.SDK_VERSION_MAJOR, bld.env.SDK_VERSION_MINOR):
# We don't have png support in the 2.x sdk, instead process these into a pbi
d.type = 'pbi'
resource_definitions.append(d)
bld_dir = bld.path.get_bld().make_node(bld.env.BUILD_DIR)
lib_resources = []
for lib in bld.env.LIB_JSON:
# Skip resource handling if not a Pebble library or if no resources are specified
if 'pebble' not in lib or 'resources' not in lib['pebble']:
continue
if 'media' not in lib['pebble']['resources'] or not lib['pebble']['resources']['media']:
continue
lib_path = bld.path.find_node(lib['path'])
try:
resources_path = lib_path.find_node('resources').find_node(bld.env.PLATFORM_NAME)
except AttributeError:
bld.fatal("Library {} is missing resources".format(lib['name']))
else:
if resources_path is None:
bld.fatal("Library {} is missing resources for the {} platform".
format(lib['name'], bld.env.PLATFORM_NAME))
for lib_resource in bld.env.LIB_RESOURCES_JSON.get(lib['name'], []):
# Skip resources that specify targetPlatforms other than this one
if 'targetPlatforms' in lib_resource:
if bld.env.PLATFORM_NAME not in lib_resource['targetPlatforms']:
continue
reso_file = '{}.{}.reso'.format(lib_resource['file'], lib_resource['name'])
resource_node = resources_path.find_node(reso_file)
if resource_node is None:
bld.fatal("Library {} is missing the {} resource for the {} platform".
format(lib['name'], lib_resource['name'], bld.env.PLATFORM_NAME))
if lib_resource['name'] in resource_file_mapping:
bld.fatal("Duplicate resource IDs are not permitted. Package resource {} uses the "
"same resource ID as another resource already in this project.".
format(lib_resource['name']))
resource_file_mapping[lib_resource['name']] = resource_node
lib_resources.append(resource_node)
resources_list = []
if resource_definitions:
resources_list.extend(resource_definitions)
if lib_resources:
resources_list.extend(lib_resources)
build_type = getattr(bld.env, 'BUILD_TYPE', 'app')
resource_ball = bld_dir.make_node('system_resources.resball')
# If this is a library, generate a resource ball containing only resources provided in this
# project (not additional dependencies)
project_resource_ball = None
if build_type == 'lib':
project_resource_ball = bld_dir.make_node('project_resources.resball')
bld.env.PROJECT_RESBALL = project_resource_ball
if published_media_json:
# Only create TLUT for non-packages
if build_type != 'lib':
timeline_resource_table = bld_dir.make_node('timeline_resource_table.reso')
resources_list.append(timeline_resource_table)
_preprocess_resource_ids(bld, resources_list, True)
bld(features='process_timeline_resources',
published_media=published_media_json,
timeline_reso=timeline_resource_table,
layouts_json=bld_dir.make_node('layouts.json'),
resource_mapping=resource_file_mapping,
vars=['RESOURCE_ID_MAPPING', 'PUBLISHED_MEDIA_JSON'])
# Create resource objects from a set of resource definitions and package them in a resource ball
bld(features='generate_resource_ball',
resources=resources_list,
resource_ball=resource_ball,
project_resource_ball=project_resource_ball,
vars=['RESOURCES_JSON', 'LIB_RESOURCES_JSON', 'RESOURCE_ID_MAPPING'])
# Create a resource ID header for use during the linking step of the build
# FIXME PBL-36458: Since pebble.h requires this file through a #include, this file must be
# present for every project, regardless of whether or not resources exist for the project. At
# this time, this means the `generate_resource_id_header` task generator must run for every
# project. Since the input of the `generate_resource_id_header` task generator is the
# resource ball created by the `generate_resource_ball` task generator, the
# `generate_resource_ball` task generator must also run for every project.
resource_id_header = bld_dir.make_node('src/resource_ids.auto.h')
bld.env.RESOURCE_ID_HEADER = resource_id_header.abspath()
bld(features='generate_resource_id_header',
resource_ball=resource_ball,
resource_id_header_target=resource_id_header,
use_extern=build_type == 'lib',
use_define=build_type == 'app',
published_media=published_media_json)
resource_id_definitions = bld_dir.make_node('src/resource_ids.auto.c')
bld.env.RESOURCE_ID_DEFINITIONS = resource_id_definitions.abspath()
bld(features='generate_resource_id_definitions',
resource_ball=resource_ball,
resource_id_definitions_target=resource_id_definitions,
published_media=published_media_json)
if not bld.env.BUILD_TYPE or bld.env.BUILD_TYPE in ('app', 'rocky'):
# Create a resource pack for distribution with an application binary
pbpack = bld_dir.make_node('app_resources.pbpack')
bld(features='generate_pbpack',
resource_ball=resource_ball,
pbpack_target=pbpack,
is_system=False)
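# Illustrative "media" entry (values assumed) of the kind iterated over by
# generate_resources() above:
#
#   {"type": "bitmap", "name": "IMAGE_LOGO", "file": "images/logo.png",
#    "targetPlatforms": ["basalt"]}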

View file

@ -0,0 +1,232 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import struct
from waflib import Node, Task, TaskGen
from waflib.TaskGen import before_method, feature
from resources.types.resource_definition import ResourceDefinition
from resources.types.resource_object import ResourceObject
from sdk_helpers import validate_resource_not_larger_than
class layouts_json(Task.Task):
"""
Task class for generating a layouts JSON file with the timeline/glance resource id mapping for
publishedMedia items
"""
def run(self):
"""
This method executes when the layouts JSON task runs
:return: N/A
"""
published_media_dict = {m['id']: m['name'] for m in self.published_media}
timeline_entries = [{'id': media_id, 'name': media_name} for media_id, media_name in
published_media_dict.iteritems()]
image_uris = {
'resources': {'app://images/' + r['name']: r['id'] for r in timeline_entries}
}
# Write a dictionary (created from map output) to a json file in the build directory
with open(self.outputs[0].abspath(), 'w') as f:
json.dump(image_uris, f, indent=8)
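# For illustration: given publishedMedia [{'id': 1, 'name': 'WEATHER'}], the
# layouts JSON written above would be:
#   {"resources": {"app://images/WEATHER": 1}}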
def _collect_lib_published_media(ctx):
"""
Collects all lib-defined publishedMedia objects and provides a list for comparison with app
aliases
:param ctx: the current Context object
:return: a list of all defined publishedMedia items from included packages
"""
published_media = []
for lib in ctx.env.LIB_JSON:
if 'pebble' not in lib or 'resources' not in lib['pebble']:
continue
if 'publishedMedia' not in lib['pebble']['resources']:
continue
published_media.extend(lib['pebble']['resources']['publishedMedia'])
return published_media
class timeline_reso(Task.Task):
"""
Task class for generating a timeline lookup table for publishedMedia items, which is then
packed and packaged as a ResourceObject for later inclusion in a ResourceBall and PBPack
"""
def run(self):
"""
This method executes when the timeline reso task runs
:return: N/A
"""
bld = self.generator.bld
resource_id_mapping = self.env.RESOURCE_ID_MAPPING
TIMELINE_RESOURCE_TABLE_ENTRY_FMT = '<III'
TLUT_SIGNATURE = 'TLUT'
timeline_resources = []
published_media_from_libs = _collect_lib_published_media(self.generator)
# Create a sparse table to represent a c-style array
for item in self.published_media:
timeline_id = item.get('id', None)
published_media_name = item.get('name', None) # string representation of published_id
build_type = self.env.BUILD_TYPE
timeline_tiny_exists = 'timeline' in item and 'tiny' in item['timeline']
if 'glance' in item:
# Alias ['timeline']['tiny'] to ['glance'] if missing, or validate
# ['timeline']['tiny'] == ['glance'] if both exist
if not timeline_tiny_exists:
timeline = item.pop('timeline', {})
timeline.update({'tiny': item['glance']})
item['timeline'] = timeline
elif item['glance'] != item['timeline']['tiny']:
bld.fatal("Resource {} in publishedMedia specifies different values {} and {}"
"for ['glance'] and ['timeline']['tiny'] attributes, respectively. "
"Differing values for these fields are not supported.".
format(item['name'], item['glance'], item['timeline']['tiny']))
else:
if not timeline_tiny_exists:
if 'alias' in item and build_type != 'lib':
# Substitute package-defined publishedMedia item for objects with `alias`
# defined
for definition in published_media_from_libs:
if definition['name'] == item['alias']:
del item['alias']
del definition['name']
item.update(definition)
break
else:
bld.fatal("No resource for alias '{}' exists in installed packages".
format(item['alias']))
else:
bld.fatal("Resource {} in publishedMedia is missing values for ['glance'] "
"and ['timeline']['tiny'].".format(published_media_name))
# Extend table if needed
if timeline_id >= len(timeline_resources):
timeline_resources.extend({'tiny': 0, 'small': 0, 'large': 0} for x in
range(len(timeline_resources), timeline_id + 1))
# Set the resource IDs for this timeline item
for size, res_id in item['timeline'].iteritems():
if res_id not in resource_id_mapping:
bld.fatal("Invalid resource ID {} specified in publishedMedia".format(res_id))
timeline_resources[timeline_id][size] = resource_id_mapping[res_id]
# Serialize the table
table = TLUT_SIGNATURE
for r in timeline_resources:
table += struct.pack(TIMELINE_RESOURCE_TABLE_ENTRY_FMT,
r['tiny'],
r['small'],
r['large'])
r = ResourceObject(ResourceDefinition('raw', 'TIMELINE_LUT', ''), table)
r.dump(self.outputs[0])
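# For illustration (hypothetical resource IDs): a single publishedMedia id 0
# whose timeline sizes map to resource IDs tiny=1, small=2, large=3 serializes
# to
#   'TLUT' + struct.pack('<III', 1, 2, 3)
# i.e. a 4-byte signature followed by one 12-byte entry per publishedMedia id,
# with gaps in the id space zero-filled.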
def _get_resource_file(ctx, mapping, resource_id, resources_node=None):
try:
resource = mapping[resource_id]
except KeyError:
ctx.bld.fatal("No resource '{}' found for publishedMedia use.".format(resource_id))
if isinstance(resource, Node.Node):
return resource.abspath()
elif resources_node:
return resources_node.find_node(str(resource)).abspath()
else:
return ctx.path.find_node('resources').find_node(str(resource)).abspath()
@feature('process_timeline_resources')
@before_method('generate_resource_ball')
def process_timeline_resources(task_gen):
"""
Process all of the resources listed in the publishedMedia object in project JSON files.
As applicable, generate a layouts.json file for mobile apps to do resource id lookups,
and generate a timeline lookup table for FW to do resource id lookups.
Keyword arguments:
published_media -- A JSON object containing all of the resources defined as publishedMedia in a
project's JSON file
timeline_reso -- The name of the file to be used to store the timeline lookup table
layouts_json -- The name of the file to be used to store the JSON timeline/glance resource id
mapping
resource_mapping -- A mapping of resource names to the resource files they refer to
:param task_gen: the task generator instance
:return: N/A
"""
bld = task_gen.bld
build_type = task_gen.env.BUILD_TYPE
published_media = task_gen.published_media
timeline_resource_table = task_gen.timeline_reso
layouts_json = task_gen.layouts_json
mapping = task_gen.resource_mapping
MAX_SIZES = {
'glance': (25, 25),
'tiny': (25, 25),
'small': (50, 50),
'large': (80, 80)
}
used_ids = []
for item in published_media:
if 'id' not in item:
# Pebble Package builds omit the ID
if build_type == 'lib':
continue
else:
bld.fatal("Missing 'id' attribute for publishedMedia item '{}'".
format(item['name']))
# Check for duplicate IDs
if item['id'] in used_ids:
task_gen.bld.fatal("Cannot specify multiple resources with the same publishedMedia ID. "
"Please modify your publishedMedia items to only use the ID {} once".
format(item['id']))
else:
used_ids.append(item['id'])
# Check for valid resource dimensions
if 'glance' in item:
res_file = _get_resource_file(task_gen, mapping, item['glance'])
if not validate_resource_not_larger_than(task_gen.bld, res_file, MAX_SIZES['glance']):
bld.fatal("publishedMedia item '{}' specifies a resource '{}' for attribute "
"'glance' that exceeds the maximum allowed dimensions of {} x {} for "
"that attribute.".
format(item['name'], mapping[item['glance']], MAX_SIZES['glance'][0],
MAX_SIZES['glance'][1]))
if 'timeline' in item:
for size in ('tiny', 'small', 'large'):
if size in item['timeline']:
res_file = _get_resource_file(task_gen, mapping, item['timeline'][size])
if not validate_resource_not_larger_than(task_gen.bld, res_file,
MAX_SIZES[size]):
bld.fatal("publishedMedia item '{}' specifies a resource '{}' for size '{}'"
" that exceeds the maximum allowed dimensions of {} x {} for "
" that size.".
format(item['name'], mapping[item['timeline'][size]], size,
MAX_SIZES[size][0], MAX_SIZES[size][1]))
timeline_reso_task = task_gen.create_task('timeline_reso',
src=None, tgt=timeline_resource_table)
timeline_reso_task.published_media = published_media
layouts_json_task = task_gen.create_task('layouts_json', src=None, tgt=layouts_json)
layouts_json_task.published_media = published_media
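# For illustration, a publishedMedia entry this feature can process (names and
# ids are hypothetical):
#   {
#     "name": "WEATHER_ICON",
#     "id": 0,
#     "glance": "IMAGE_WEATHER_TINY",
#     "timeline": {"tiny": "IMAGE_WEATHER_TINY", "small": "IMAGE_WEATHER_SMALL"}
#   }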

View file

@ -0,0 +1,113 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from waflib import Logs, Task
from waflib.TaskGen import after_method, feature
from binutils import size
from memory_reports import (app_memory_report, app_resource_memory_error,
app_appstore_resource_memory_error,
bytecode_memory_report, simple_memory_report)
from sdk_helpers import is_sdk_2x
class memory_usage_report(Task.Task):
"""
Task class to print a memory usage report for the specified binary and resources, if any
"""
def run(self):
"""
This method executes when the memory usage report task runs
:return: None
"""
bin_type = self.bin_type
platform = self.generator.env.PLATFORM_NAME
if bin_type == 'rocky':
env = self.generator.bld.all_envs[self.env.PLATFORM_NAME]
Logs.pprint(*bytecode_memory_report(platform, env.SNAPSHOT_SIZE, env.SNAPSHOT_MAX))
return
bin_path = self.inputs[0].abspath()
resources_path = self.inputs[1].abspath() if len(self.inputs) > 1 else None
max_ram, max_resources, max_appstore_resources = self.max_sizes
# Handle zero-size binaries (more common with packages)
ram_size = sum(size(bin_path)) if size(bin_path) != 0 else 0
resource_size = os.stat(resources_path).st_size if resources_path else None
if resource_size and max_resources and max_appstore_resources:
if resource_size > max_resources:
Logs.pprint(*app_appstore_resource_memory_error(platform, resource_size,
max_resources))
return -1
elif resource_size > max_appstore_resources:
Logs.pprint(*app_appstore_resource_memory_error(platform, resource_size,
max_appstore_resources))
if max_ram:
# resource_size and max_appstore_resources are optional
free_ram = max_ram - ram_size
Logs.pprint(*app_memory_report(platform, bin_type, ram_size, max_ram,
free_ram, resource_size, max_appstore_resources))
else:
# resource_size is optional
Logs.pprint(*simple_memory_report(platform, ram_size, resource_size))
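# For illustration (hypothetical numbers): with max_ram = 65536 and a binary
# whose section sizes sum to 10240 bytes, free_ram is 65536 - 10240 = 55296,
# which the app memory report prints as the remaining RAM.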
@feature('memory_usage')
@after_method('cprogram', 'cstlib', 'process_rocky_js')
def generate_memory_usage_report(task_gen):
"""
Generates and prints a report of the project's memory usage (binary + resources, if applicable).
Keyword arguments:
app -- The path to the app elf file, if this is an app being evaluated
worker -- The path to the worker elf file, if this is a worker being evaluated
lib -- The path to the library archive file, if this is a library being evaluated
resources - The path to the resource pack or resource ball, if resources exist for this bin_type
:param task_gen: the task generator instance
:return: None
"""
app, worker, lib, resources = (getattr(task_gen, attr, None)
for attr in ('app', 'worker', 'lib', 'resources'))
max_resources = task_gen.env.PLATFORM["MAX_RESOURCES_SIZE"]
max_resources_appstore = task_gen.env.PLATFORM["MAX_RESOURCES_SIZE_APPSTORE"]
app_max_ram = task_gen.env.PLATFORM["MAX_APP_MEMORY_SIZE"] if app else None
worker_max_ram = task_gen.env.PLATFORM["MAX_WORKER_MEMORY_SIZE"] if worker else None
if app:
app_task = task_gen.create_task('memory_usage_report',
[task_gen.to_nodes(app)[0],
task_gen.to_nodes(resources)[0]])
app_task.max_sizes = (app_max_ram, max_resources, max_resources_appstore)
app_task.bin_type = 'app'
if worker:
worker_task = task_gen.create_task('memory_usage_report',
task_gen.to_nodes(worker)[0])
worker_task.max_sizes = (worker_max_ram, None, None)
worker_task.bin_type = 'worker'
if lib:
lib_task = task_gen.create_task('memory_usage_report',
[task_gen.to_nodes(lib)[0],
task_gen.to_nodes(resources)[0]])
lib_task.max_sizes = (None, None, None)
lib_task.bin_type = 'lib'
if getattr(task_gen, 'bin_type', None) == 'rocky':
rocky_task = task_gen.create_task('memory_usage_report', task_gen.env.JS_RESO)
rocky_task.bin_type = 'rocky'
rocky_task.vars = ['PLATFORM_NAME']
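# For illustration, a task generator that would trigger this feature
# (hypothetical file names):
#   bld(features='memory_usage',
#       app='pebble-app.elf',
#       resources='app_resources.pbpack')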

395
sdk/waftools/sdk_helpers.py Normal file
View file

@ -0,0 +1,395 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import struct
import re
from waflib import Logs
from pebble_package import LibraryPackage
from pebble_sdk_platform import pebble_platforms, maybe_import_internal
from pebble_sdk_version import set_env_sdk_version
from resources.types.resource_object import ResourceObject
def _get_pbi_size(data):
"""
This method takes resource data and determines the dimensions of the pbi
:param data: the data contained in the pbi, starting at the header
:return: tuple containing the width and height of the pbi
"""
# Read the first byte at header offset 0x08 for width
width = struct.unpack('<h', data[8:10])[0]
# Read the next 2 bytes after the width to get the height
height = struct.unpack('<h', data[10:12])[0]
return width, height
def _get_pdc_size(data):
"""
This method takes resource data and determines the dimensions of the PDC
:param data: the data contained in the PDC, starting at the header
:return: tuple containing the width and height of the PDC
"""
# Read the first 2 bytes at header offset 0x06 for width
width = struct.unpack('<H', data[6:8])[0]
# Read the next 2 bytes after the width to get the height
height = struct.unpack('<H', data[8:10])[0]
return width, height
def _get_png_size(data):
"""
This method takes resource data and determines the dimensions of the PNG
:param data: the data contained in the PNG, starting at the IHDR
:return: tuple containing the width and height of the PNG
"""
# Assert that this is the IHDR header
assert data[:4] == 'IHDR'
# Read the first 4 bytes after IHDR for width
width = struct.unpack('>I', data[4:8])[0]
# Read the next 4 bytes after the width to get the height
height = struct.unpack('>I', data[8:12])[0]
return width, height
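# For illustration: for a 144 x 168 PNG, the IHDR chunk data starts with
# 'IHDR' + struct.pack('>II', 144, 168), so _get_png_size returns (144, 168).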
def _get_supported_platforms(ctx, has_rocky=False):
"""
This method returns all of the supported SDK platforms, based on the SDK components found on
the filesystem
:param ctx: the Context object
:param has_rocky: whether the project contains Rocky.js code (aplite is excluded if so)
:return: a list of the platforms that are supported for the given SDK
"""
sdk_check_nodes = ['lib/libpebble.a',
'pebble_app.ld.template',
'tools',
'include',
'include/pebble.h']
supported_platforms = os.listdir(ctx.env.PEBBLE_SDK_ROOT)
invalid_platforms = []
for platform in supported_platforms:
pebble_sdk_platform = ctx.root.find_node(ctx.env.PEBBLE_SDK_ROOT).find_node(platform)
for node in sdk_check_nodes:
if pebble_sdk_platform.find_node(node) is None:
if ctx.root.find_node(ctx.env.PEBBLE_SDK_COMMON).find_node(node) is None:
invalid_platforms.append(platform)
break
for platform in invalid_platforms:
supported_platforms.remove(platform)
if has_rocky and 'aplite' in supported_platforms:
supported_platforms.remove('aplite')
ctx.env.SUPPORTED_PLATFORMS = supported_platforms
return supported_platforms
def append_to_attr(self, attr, new_values):
"""
This helper method appends `new_values` to `attr` on the object `self`
:param self: the object
:param attr: the attribute to modify
:param new_values: the value(s) to set on the attribute
:return: N/A
"""
values = self.to_list(getattr(self, attr, []))
if not isinstance(new_values, list):
new_values = [new_values]
values.extend(new_values)
setattr(self, attr, values)
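# For illustration (hypothetical attribute values): if task_gen.use == 'foo',
# append_to_attr(task_gen, 'use', 'pebble_sdk') leaves it as ['foo', 'pebble_sdk'].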
def configure_libraries(ctx, libraries):
dependencies = libraries.keys()
lib_json = []
lib_resources_json = {}
index = 0
while index < len(dependencies):
info, resources, additional_deps = process_package(ctx, dependencies[index])
lib_json.append(info)
lib_resources_json[dependencies[index]] = resources
dependencies.extend(additional_deps)
index += 1
# Store package.json info for each library and add resources to an environment variable for
# dependency-checking
ctx.env.LIB_JSON = lib_json
if lib_resources_json:
ctx.env.LIB_RESOURCES_JSON = lib_resources_json
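# For illustration (hypothetical package name): with libraries
# {'pebble-clay': '~1.0.0'}, process_package parses that package's package.json
# and appends any of its own dependencies to the list, so they are processed by
# the same loop.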
def configure_platform(ctx, platform):
"""
Configure a build for the <platform> specified
:param ctx: the ConfigureContext
:param platform: the hardware platform this build is being targeted for
:return: N/A
"""
pebble_sdk_root = get_node_from_abspath(ctx, ctx.env.PEBBLE_SDK_ROOT)
ctx.env.PLATFORM = pebble_platforms[platform]
ctx.env.PEBBLE_SDK_PLATFORM = pebble_sdk_root.find_node(str(platform)).abspath()
ctx.env.PLATFORM_NAME = ctx.env.PLATFORM['NAME']
for attribute in ['DEFINES']: # Attributes with list values
ctx.env.append_unique(attribute, ctx.env.PLATFORM[attribute])
for attribute in ['BUILD_DIR', 'BUNDLE_BIN_DIR']: # Attributes with a single value
ctx.env[attribute] = ctx.env.PLATFORM[attribute]
ctx.env.append_value('INCLUDES', ctx.env.BUILD_DIR)
ctx.msg("Found Pebble SDK for {} in:".format(platform), ctx.env.PEBBLE_SDK_PLATFORM)
process_info = (
pebble_sdk_root.find_node(str(platform)).find_node('include/pebble_process_info.h'))
set_env_sdk_version(ctx, process_info)
if is_sdk_2x(ctx.env.SDK_VERSION_MAJOR, ctx.env.SDK_VERSION_MINOR):
ctx.env.append_value('DEFINES', "PBL_SDK_2")
else:
ctx.env.append_value('DEFINES', "PBL_SDK_3")
ctx.load('pebble_sdk_gcc')
def find_sdk_component(ctx, env, component):
"""
This method finds an SDK component, either in the platform SDK folder, or the 'common' folder
:param ctx: the Context object
:param env: the environment which contains platform SDK folder path for the current platform
:param component: the SDK component being sought
:return: the path to the SDK component being sought
"""
return (ctx.root.find_node(env.PEBBLE_SDK_PLATFORM).find_node(component) or
ctx.root.find_node(env.PEBBLE_SDK_COMMON).find_node(component))
def get_node_from_abspath(ctx, path):
return ctx.root.make_node(path)
def get_target_platforms(ctx):
"""
This method returns a list of target platforms for a build, by comparing the list of requested
platforms to the list of supported platforms, returning all of the supported platforms if no
specific platforms are requested
:param ctx: the Context object
:return: list of target platforms for the build
"""
supported_platforms = _get_supported_platforms(ctx, ctx.env.BUILD_TYPE == 'rocky')
if not ctx.env.REQUESTED_PLATFORMS:
target_platforms = supported_platforms
else:
target_platforms = list(set(supported_platforms) & set(ctx.env.REQUESTED_PLATFORMS))
if not target_platforms:
ctx.fatal("No valid targetPlatforms specified in appinfo.json. Valid options are {}"
.format(supported_platforms))
ctx.env.TARGET_PLATFORMS = sorted([p.encode('utf-8') for p in target_platforms], reverse=True)
return target_platforms
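# For illustration: with supported platforms ['aplite', 'basalt', 'chalk'] and
# requested platforms ['basalt', 'emery'], the build targets just ['basalt'].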
def is_sdk_2x(major, minor):
"""
This method checks if a <major>.<minor> API version are associated with a 2.x version of the SDK
:param major: the major API version to check
:param minor: the minor API version to check
:return: boolean representing whether a 2.x SDK is being used or not
"""
LAST_2X_MAJOR_VERSION = 5
LAST_2X_MINOR_VERSION = 19
return (major, minor) <= (LAST_2X_MAJOR_VERSION, LAST_2X_MINOR_VERSION)
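# For illustration: is_sdk_2x(5, 19) is True (the last 2.x version), while
# is_sdk_2x(5, 20) is False and selects the 3.x code paths.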
def process_package(ctx, package, root_lib_node=None):
"""
This method parses the package.json for a given package and returns relevant information
:param ctx: the Context object
:param package: the package to parse information for
:param root_lib_node: node containing the package to be processed, if not the standard LIB_DIR
:return:
- a dictionary containing the contents of package.json
- a dictionary containing the resources object for the package
- a list of dependencies for this package
"""
resources_json = {}
if not root_lib_node:
root_lib_node = ctx.path.find_node(ctx.env.LIB_DIR)
if root_lib_node is None:
ctx.fatal("Missing {} directory".format(ctx.env.LIB_DIR))
lib_node = root_lib_node.find_node(str(package))
if lib_node is None:
ctx.fatal("Missing library for {} in {}".format(str(package), ctx.env.LIB_DIR))
else:
libinfo_node = lib_node.find_node('package.json')
if libinfo_node is None:
ctx.fatal("Missing package.json for {} library".format(str(package)))
else:
if lib_node.find_node(ctx.env.LIB_DIR):
error_str = ("ERROR: Multiple versions of the same package are not supported by "
"the Pebble SDK due to namespace issues during linking. Package '{}' "
"contains the following duplicate and incompatible dependencies, "
"which may lead to additional build errors and/or unpredictable "
"runtime behavior:\n".format(package))
packages_str = ""
for package in lib_node.find_node(ctx.env.LIB_DIR).ant_glob('**/package.json'):
with open(package.abspath()) as f:
info = json.load(f)
if not dict(ctx.env.PROJECT_INFO).get('enableMultiJS', False):
if 'pebble' not in info:
continue
packages_str += " '{}': '{}'\n".format(info['name'], info['version'])
if packages_str:
Logs.pprint("RED", error_str + packages_str)
with open(libinfo_node.abspath()) as f:
libinfo = json.load(f)
if 'pebble' in libinfo:
if ctx.env.BUILD_TYPE == 'rocky':
ctx.fatal("Packages containing C binaries are not compatible with Rocky.js "
"projects. Please remove '{}' from the `dependencies` object in "
"package.json".format(libinfo['name']))
libinfo['path'] = lib_node.make_node('dist').path_from(ctx.path)
if 'resources' in libinfo['pebble']:
if 'media' in libinfo['pebble']['resources']:
resources_json = libinfo['pebble']['resources']['media']
# Extract package into "dist" folder
dist_node = lib_node.find_node('dist.zip')
if not dist_node:
ctx.fatal("Missing dist.zip file for {}. Are you sure this is a Pebble "
"library?".format(package))
lib_package = LibraryPackage(dist_node.abspath())
lib_package.unpack(libinfo['path'])
lib_js_node = lib_node.find_node('dist/js')
if lib_js_node:
libinfo['js_paths'] = [lib_js.path_from(ctx.path) for lib_js in
lib_js_node.ant_glob(['**/*.js', '**/*.json'])]
else:
libinfo['js_paths'] = [lib_js.path_from(ctx.path) for lib_js in
lib_node.ant_glob(['**/*.js', '**/*.json'],
excl="**/*.min.js")]
dependencies = libinfo['dependencies'].keys() if 'dependencies' in libinfo else []
return libinfo, resources_json, dependencies
def truncate_to_32_bytes(name):
"""
This method takes an input string and returns a 32-byte truncated string if the input string is
longer than 32 bytes
:param name: the string to truncate
:return: the truncated string, if the input string was > 32 bytes, or else the original input
string
"""
return name[:30] + '..' if len(name) > 32 else name
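# For illustration: a 40-character name comes back as its first 30 characters
# plus '..' (32 bytes total); a 32-character name is returned unchanged.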
def validate_message_keys_object(ctx, project_info, info_json_type):
"""
Verify that the appropriately-named message key object is present in the project info file
:param ctx: the ConfigureContext object
:param project_info: JSON object containing project info
:param info_json_type: string containing the name of the file used to extract project info
:return: N/A
"""
if 'appKeys' in project_info and info_json_type == 'package.json':
ctx.fatal("Project contains an invalid object `appKeys` in package.json. Please use "
"`messageKeys` instead.")
if 'messageKeys' in project_info and info_json_type == 'appinfo.json':
ctx.fatal("Project contains an invalid object `messageKeys` in appinfo.json. Please use "
"`appKeys` instead.")
def validate_resource_not_larger_than(ctx, resource_file, dimensions=None, width=None, height=None):
"""
This method takes a resource file and determines whether the file's dimensions exceed the
maximum allowed values provided.
:param ctx: the Context object
:param resource_file: the path to the resource file
:param dimensions: tuple specifying max width and height
:param width: number specifying max width
:param height: number specifying max height
:return: boolean for whether the resource fits within the maximum allowed dimensions
"""
if not dimensions and not width and not height:
raise TypeError("Missing values for maximum width and/or height to validate against")
if dimensions:
width, height = dimensions
with open(resource_file, 'rb') as f:
if resource_file.endswith('.reso'):
reso = ResourceObject.load(resource_file)
if reso.definition.type == 'bitmap':
storage_format = reso.definition.storage_format
else:
storage_format = reso.definition.type
if storage_format == 'pbi':
resource_size = _get_pbi_size(reso.data)
elif storage_format == 'png':
resource_size = _get_png_size(reso.data[12:])
elif storage_format == 'raw':
try:
assert reso.data[:4] == 'PDCI'
except AssertionError:
ctx.fatal("Unsupported published resource type for {}".format(resource_file))
else:
resource_size = _get_pdc_size(reso.data[4:])
else:
data = f.read(24)
if data[1:4] == 'PNG':
resource_size = _get_png_size(data[12:])
elif data[:4] == 'PDCI':
resource_size = _get_pdc_size(data[4:])
else:
ctx.fatal("Unsupported published resource type for {}".format(resource_file))
if width and height:
# Compare each dimension separately; a plain tuple comparison is lexicographic
# and would accept images that are narrow enough but too tall
return resource_size[0] <= width and resource_size[1] <= height
elif width:
return resource_size[0] <= width
elif height:
return resource_size[1] <= height
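# For illustration (mirroring the publishedMedia validation above):
#   validate_resource_not_larger_than(bld, res_file, dimensions=(25, 25))
# returns True only if the image is at most 25 pixels in each dimension.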
def wrap_task_name_with_platform(self):
"""
This method replaces the existing waf Task class's __str__ method with the original content
of the __str__ method, as well as an additional "<platform> | " before the task information,
if a platform is set.
:param self: the task instance
:return: the user-friendly string to print
"""
src_str = ' '.join([a.nice_path() for a in self.inputs])
tgt_str = ' '.join([a.nice_path() for a in self.outputs])
sep = ' -> ' if self.outputs else ''
name = self.__class__.__name__.replace('_task', '')
# Modification to the original __str__ method
if self.env.PLATFORM_NAME:
name = self.env.PLATFORM_NAME + " | " + name
return '%s: %s%s%s\n' % (name, src_str, sep, tgt_str)
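# For illustration (hypothetical file names): a C compile task on the basalt
# platform prints as
#   'basalt | c: main.c -> main.c.o'
# instead of the stock 'c: main.c -> main.c.o'.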

27
sdk/waftools/sdk_paths.py Normal file
View file

@ -0,0 +1,27 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This script is used to import any paths required by the SDK file structure for building Pebble
projects. Even though this script is not specifically a waftool, we benefit from bundling it
together with the other waftools because it automatically gets included in the search path used for
imports by other waftools.
"""
import os
import sys
sdk_root_dir = os.path.dirname(sys.path[0])
sys.path.append(os.path.join(sdk_root_dir, 'common/waftools'))
sys.path.append(os.path.join(sdk_root_dir, 'common/tools'))
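# For illustration (assuming sys.path[0] points at this waftools directory):
# if these tools live in <sdk-root>/waftools, the two entries added above are
# <sdk-root>/common/waftools and <sdk-root>/common/tools.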

275
sdk/wscript Normal file
View file

@ -0,0 +1,275 @@
#
# This waf script is responsible for building the SDK which can be shipped off to users into
# tintin/build/sdk/
# src_wscript is the file which app developers actually run to build their apps
#
import json
import os
import waflib
from string import Template
from tools.fw_elf_obfuscate import obfuscate
COPY = "cp ${SRC} ${TGT}"
def _generate_sdk_waf(ctx):
"""
Build a custom version of waf that includes the waf plugins we need
:param ctx: the build Context object
:return: N/A
"""
sdk_waftools = [tool.path_from(ctx.path.parent) for tool in ctx.path.ant_glob('waftools/*.py')]
shared_waftools = [
"tools/resources/waftools/generate_resource_ball.py",
"tools/resources/waftools/generate_pbpack.py",
"tools/resources/waftools/generate_resource_id_header.py",
"waftools/file_name_c_define.py",
"waftools/ldscript.py",
"waftools/objcopy.py",
"waftools/pebble_sdk_gcc.py",
"waftools/pebble_sdk_version.py",
"waftools/xcode_pebble.py"
]
pebble_waf_tools = []
for tool in sdk_waftools + shared_waftools:
path = ctx.path.parent.find_node(tool)
if path is None:
ctx.fatal("Trying to bundle non existent resource in pb-waf ({})".format(tool))
pebble_waf_tools.append(path)
# We cannot run this as a sub-wscript because we use a specific vendor-provided
# wscript that provides the --make-waf option and needs to be run in its own clean
# environment
def _build_waf(task):
bld = task.generator.bld
cmd_str = ('cd "{}" && python "{}" distclean configure build --make-waf --tools="{}" &&'
'cp waf "{}"'.format(waf_folder.abspath(),
task.inputs[0].abspath(),
','.join(x.abspath() for x in task.inputs[1:]),
task.outputs[0].abspath()))
try:
bld.cmd_and_log(cmd_str, quiet=waflib.Context.BOTH)
except waflib.Errors.WafError as e:
bld.to_log("out: %s" % e.stdout)
bld.to_log("err: %s" % e.stderr)
raise e
waf_folder = ctx.path.find_node('waf')
waf_light = waf_folder.find_node('waf-light')
ctx(rule=_build_waf,
source=[waf_light, ] + pebble_waf_tools,
target=waf_folder.get_bld())
def _copy_common_tools(bld, common_folder_node):
"""
Copy SDK tools into common/waftools and common/tools
:param bld:
:param common_folder_node:
:return:
"""
for tool in bld.path.ant_glob(['tools/**/*']):
bld(rule=COPY,
source=tool,
target=common_folder_node.make_node(tool.path_from(bld.path)))
shared_tools = [
"tools/binutils.py",
"tools/bitmapgen.py",
"tools/font/__init__.py",
"tools/font/fontgen.py",
"tools/generate_appinfo.py",
"tools/generate_c_byte_array.py",
"tools/mkbundle.py",
"tools/pbpack.py",
"tools/pbpack_meta_data.py",
"tools/pebble_image_routines.py",
"tools/pebble_sdk_platform.py",
"tools/png2pblpng.py",
"tools/stm32_crc.py"
]
if bld.env.INTERNAL_SDK_BUILD:
shared_tools.append("tools/pebble_sdk_platform_internal.py")
for tool in shared_tools:
bld(rule=COPY,
source=bld.path.parent.find_node(tool),
target=common_folder_node.make_node(tool))
resource_waftools = [
"tools/resources/__init__.py",
"tools/resources/find_resource_filename.py",
"tools/resources/resource_map/__init__.py",
"tools/resources/resource_map/resource_generator.py",
"tools/resources/resource_map/resource_generator_bitmap.py",
"tools/resources/resource_map/resource_generator_font.py",
"tools/resources/resource_map/resource_generator_js.py",
"tools/resources/resource_map/resource_generator_pbi.py",
"tools/resources/resource_map/resource_generator_png.py",
"tools/resources/resource_map/resource_generator_raw.py",
"tools/resources/types/__init__.py",
"tools/resources/types/resource_ball.py",
"tools/resources/types/resource_declaration.py",
"tools/resources/types/resource_definition.py",
"tools/resources/types/resource_object.py"
]
for tool in resource_waftools:
tool_node = bld.path.parent.find_node(tool)
bld(rule=COPY,
source=tool_node,
target=(common_folder_node.make_node('waftools')
.make_node(tool_node.path_from(bld.path.parent.find_node('tools')))))
def options(opt):
opt.add_option('--sdk_debug_elf', action='store_true',
help='Enable building obfuscated ELF files for SDK debugging.')
def configure(conf):
if conf.options.sdk_debug_elf:
conf.env.INCLUDE_SDK_DEBUG_ELF = True
def build(bld):
bld(rule=COPY,
source=bld.path.find_node('sdk_requirements.txt'),
target=bld.path.get_bld().make_node('requirements.txt'))
bld(rule=COPY,
source=bld.path.find_node('sdk_package.json'),
target=bld.path.get_bld().make_node('package.json'))
bld(rule=COPY,
source=bld.path.find_node('use_requirements.json'),
target=bld.path.get_bld().make_node('use_requirements.json'))
tintin_home = bld.path.parent
platform_folder_node = bld.path.get_bld().make_node(bld.env.PLATFORM_NAME)
platform_folder_node.parent.mkdir()
bld(features='subst',
source=bld.path.find_node('Doxyfile-SDK.template'),
target=platform_folder_node.make_node('Doxyfile-SDK.auto'),
TINTIN_ROOT=tintin_home.abspath(),
PLATFORM_PATH=platform_folder_node.path_from(bld.path.parent))
common_folder_node = bld.path.get_bld().make_node('common')
common_folder_node.parent.mkdir()
for sdk_file in bld.path.ant_glob(['include/*', 'pebble_app.ld.template']):
bld(rule=COPY,
source=sdk_file,
target=common_folder_node.make_node(sdk_file.path_from(bld.path)))
if not bld.env.NOJS:
js_tooling_path = os.path.dirname(bld.env.JS_TOOLING_SCRIPT.relpath())
for js_tool in ('js_tooling.js', 'generate_snapshot.js'):
bld(rule=COPY,
source=bld.path.parent.get_bld().make_node(js_tooling_path).make_node(js_tool),
target=common_folder_node.make_node('tools').make_node(js_tool),
name='copy_rocky_tooling')
template_folder_node = common_folder_node.make_node('templates')
template_folder_node.parent.mkdir()
defaults_node = bld.path.find_node('defaults')
# Check whether the default project files are valid templates:
with open(defaults_node.find_node('templates.json').abspath()) as f:
templates = json.load(f)
def _collect_check_templates_tasks(dct):
for key in dct:
val = dct[key]
if isinstance(val, basestring):
# avoid unicode, it will trip up waf's Node3 and make it 💩 all over the place
val = str(val)
template_node = defaults_node.find_node(val.split(os.path.sep))
if not template_node:
waflib.Logs.warn(
"Could not find {}, but it's defined in "
"templates.json".format(val))
continue
with open(template_node.abspath()) as tf:
try:
Template(tf.read()).substitute()
except KeyError:
pass # This is expected, no args to substitute()
except ValueError as e:
bld.fatal(
"Template error in {}:\n{}\n"
"Hint: make sure to escape dollar signs! ($ => $$)".format(
template_node.abspath(), e.message))
elif isinstance(val, dict):
_collect_check_templates_tasks(val)
_collect_check_templates_tasks(templates)
# Copy default SDK project files
for default_file in bld.path.ant_glob('defaults/**/*'):
bld(rule=COPY,
source=default_file,
target=template_folder_node.make_node(default_file.path_from(defaults_node)))
# Generate shims
# We shell out to this script because it imports the clang module, which does not run correctly
# under pypy. By running python explicitly when calling this script, we avoid the
# incompatibility with pypy and clang.
native_generator_script = (
bld.path.parent.find_node('tools/generate_native_sdk/generate_pebble_native_sdk_files.py'))
export_symbols = bld.path.parent.find_node('tools/generate_native_sdk/exported_symbols.json')
source_dir = bld.path.parent.find_node('src')
output_source_dir = source_dir.get_bld()
with open(export_symbols.abspath()) as f:
native_generator_sources = (
[source_dir.find_node(str(header)) for header in json.load(f)['files']])
native_generator_sources.append(export_symbols)
native_generator_targets = [bld.path.parent.make_node('src/fw/pebble.auto.c').get_bld(),
platform_folder_node.make_node('include/pebble.h'),
platform_folder_node.make_node('include/pebble_sdk_version.h'),
platform_folder_node.make_node('include/pebble_process_info.h'),
platform_folder_node.make_node('include/pebble_worker.h'),
platform_folder_node.make_node('include/pebble_worker_sdk_version.h')]
bld(rule="cd '{}' ; python '{}' --sdk-dir='{}' '{}' '{}' '{}' '{}' {}".
format(tintin_home.abspath(),
native_generator_script.abspath(),
platform_folder_node.abspath(),
export_symbols.abspath(),
source_dir.abspath(),
output_source_dir.abspath(),
bld.env.PLATFORM_NAME,
'--internal-sdk-build' if bld.env.INTERNAL_SDK_BUILD else ''),
name="generate_native_sdk",
source=native_generator_sources,
target=native_generator_targets)
_generate_sdk_waf(bld)
_copy_common_tools(bld, common_folder_node)
# Generate our exported font header based on the whitelist in exported_symbols.json.
# This is different than our internal header (font_resource_keys.auto.h) as it excludes
# some fonts that we don't want to export
def _generate_pebble_fonts_h(task):
with open(task.outputs[0].abspath(), 'w') as f_out:
f_out.write('#pragma once\n')
f_out.write('\n')
with open(task.inputs[0].abspath(), 'r') as f_in:
font_list = json.load(f_in)["fonts"]
for font in font_list:
f_out.write('#define FONT_KEY_{0} "RESOURCE_ID_{0}"\n'.format(font))
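# For illustration: a font entry "GOTHIC_14" in exported_symbols.json becomes
#   #define FONT_KEY_GOTHIC_14 "RESOURCE_ID_GOTHIC_14"
# in the generated pebble_fonts.h.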
# Generate the exported font key header shipped with the SDK
bld(rule=_generate_pebble_fonts_h,
source=export_symbols,
target=platform_folder_node.make_node('include/pebble_fonts.h'))
# Generate obfuscated elf file for GDB debugging
if bld.env.INCLUDE_SDK_DEBUG_ELF:
def _obfuscate_elf(task):
input_elf = task.inputs[0].abspath()
output_elf = task.outputs[0].abspath()
obfuscate(input_elf, output_elf, no_text=False)
firmware_build_node = bld.path.parent.get_bld().find_or_declare('src').find_or_declare('fw')
bld(rule=_obfuscate_elf,
source=firmware_build_node.make_node('tintin_fw.elf'),
target=bld.path.get_bld().make_node('{}_sdk_debug.elf'.format(bld.env.PLATFORM_NAME)))