Initial community commit
parent 537bcbc862
commit fc06254474
16440 changed files with 4239995 additions and 2 deletions
438
Src/external_dependencies/openmpt-trunk/include/premake/src/base/_foundation.lua
vendored
Normal file
|
@@ -0,0 +1,438 @@
|
|||
---
|
||||
-- Base definitions required by all the other scripts.
|
||||
-- @copyright 2002-2015 Jason Perkins and the Premake project
|
||||
---
|
||||
|
||||
premake = premake or {}
|
||||
premake._VERSION = _PREMAKE_VERSION
|
||||
package.loaded["premake"] = premake
|
||||
|
||||
premake.modules = {}
|
||||
premake.extensions = premake.modules
|
||||
|
||||
local semver = dofile('semver.lua')
|
||||
local p = premake
|
||||
|
||||
|
||||
-- Keep track of warnings that have been shown, so they don't get shown twice
|
||||
|
||||
local _warnings = {}
|
||||
|
||||
-- Keep track of aliased functions, so I can resolve to canonical names
|
||||
|
||||
local _aliases = {}
|
||||
|
||||
--
|
||||
-- Define some commonly used symbols, for future-proofing.
|
||||
--
|
||||
|
||||
premake.C = "C"
|
||||
premake.C7 = "c7"
|
||||
premake.CLANG = "clang"
|
||||
premake.CONSOLEAPP = "ConsoleApp"
|
||||
premake.CPP = "C++"
|
||||
premake.CSHARP = "C#"
|
||||
premake.GCC = "gcc"
|
||||
premake.HAIKU = "haiku"
|
||||
premake.ANDROID = "android"
|
||||
premake.IOS = "ios"
|
||||
premake.LINUX = "linux"
|
||||
premake.MACOSX = "macosx"
|
||||
premake.MAKEFILE = "Makefile"
|
||||
premake.MBCS = "MBCS"
|
||||
premake.NONE = "None"
|
||||
premake.DEFAULT = "Default"
|
||||
premake.OBJECTIVEC = "Objective-C"
|
||||
premake.OBJECTIVECPP = "Objective-C++"
|
||||
premake.ON = "On"
|
||||
premake.OFF = "Off"
|
||||
premake.POSIX = "posix"
|
||||
premake.PS3 = "ps3"
|
||||
premake.SHAREDITEMS = "SharedItems"
|
||||
premake.SHAREDLIB = "SharedLib"
|
||||
premake.STATICLIB = "StaticLib"
|
||||
premake.UNICODE = "Unicode"
|
||||
premake.UNIVERSAL = "universal"
|
||||
premake.UTILITY = "Utility"
|
||||
premake.PACKAGING = "Packaging"
|
||||
premake.WINDOWEDAPP = "WindowedApp"
|
||||
premake.WINDOWS = "windows"
|
||||
premake.X86 = "x86"
|
||||
premake.X86_64 = "x86_64"
|
||||
premake.ARM = "ARM"
|
||||
premake.ARM64 = "ARM64"
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Provide an alias for a function in a namespace. Calls to the alias will
|
||||
-- invoke the canonical function, and attempts to override the alias will
|
||||
-- instead override the canonical call.
|
||||
--
|
||||
-- @param scope
|
||||
-- The table containing the function to be aliased. Use _G for
|
||||
-- global functions.
|
||||
-- @param canonical
|
||||
-- The name of the function to be aliased (a string value)
|
||||
-- @param alias
|
||||
-- The new alias for the function (another string value).
|
||||
---
|
||||
|
||||
function p.alias(scope, canonical, alias)
|
||||
scope, canonical = p.resolveAlias(scope, canonical)
|
||||
if not scope[canonical] then
|
||||
error("unable to alias '" .. canonical .. "'; no such function", 2)
|
||||
end
|
||||
|
||||
_aliases[scope] = _aliases[scope] or {}
|
||||
_aliases[scope][alias] = canonical
|
||||
|
||||
scope[alias] = function(...)
|
||||
return scope[canonical](...)
|
||||
end
|
||||
end
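-- Usage sketch (illustrative addition, not part of the original file; the
-- table and function names are hypothetical):
--
--   local util = { shout = function(s) return s:upper() end }
--   p.alias(util, "shout", "yell")
--   util.yell("hi")      --> "HI"
--   -- overriding util.yell later resolves to, and overrides, util.shout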
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Call a list of functions.
|
||||
--
|
||||
-- @param funcs
|
||||
-- The list of functions to be called, or a function that can be called
|
||||
-- to build and return the list. If this is a function, it will be called
|
||||
-- with all of the additional arguments (below).
|
||||
-- @param ...
|
||||
-- An optional set of arguments to be passed to each of the functions as
|
||||
-- they are called.
|
||||
---
|
||||
|
||||
function premake.callArray(funcs, ...)
|
||||
if type(funcs) == "function" then
|
||||
funcs = funcs(...)
|
||||
end
|
||||
if funcs then
|
||||
for i = 1, #funcs do
|
||||
funcs[i](...)
|
||||
end
|
||||
end
|
||||
end
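-- Usage sketch (illustrative addition; the callback functions are hypothetical):
--
--   local steps = {
--     function(ctx) print("step 1", ctx) end,
--     function(ctx) print("step 2", ctx) end,
--   }
--   premake.callArray(steps, "my-context")   -- each step receives "my-context"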
|
||||
|
||||
|
||||
-- TODO: THIS IMPLEMENTATION IS GOING AWAY
|
||||
|
||||
function premake.callarray(namespace, array, ...)
|
||||
local n = #array
|
||||
for i = 1, n do
|
||||
local fn = namespace[array[i]]
|
||||
if not fn then
|
||||
error(string.format("Unable to find function '%s'", array[i]))
|
||||
end
|
||||
fn(...)
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Compare a version string that uses semver semantics against a
|
||||
-- version comparison string. Comparisons take the form of ">=5.0" (5.0 or
|
||||
-- later), "5.0" (5.0 or later), ">=5.0 <6.0" (5.0 or later but not 6.0 or
|
||||
-- later).
|
||||
--
|
||||
-- @param version
|
||||
-- The version to be tested.
|
||||
-- @param checks
|
||||
-- The comparison string to be evaluated.
|
||||
-- @return
|
||||
-- True if the comparisons pass, false if any fail.
|
||||
---
|
||||
|
||||
function p.checkVersion(version, checks)
|
||||
if not version then
|
||||
return false
|
||||
end
|
||||
|
||||
-- Try to parse as semver. If parsing fails, the version is not semver
-- compatible and cannot be compared; in that case, skip the check entirely
-- but emit a warning.
|
||||
if not premake.isSemVer(version) then
|
||||
p.warn("'" .. version .. "' is not semver compatible, and cannot be compared against '" .. checks .. "'.");
|
||||
return true
|
||||
end
|
||||
|
||||
-- now compare the semver against the checks.
|
||||
local function eq(a, b) return a == b end
|
||||
local function le(a, b) return a <= b end
|
||||
local function lt(a, b) return a < b end
|
||||
local function ge(a, b) return a >= b end
|
||||
local function gt(a, b) return a > b end
|
||||
local function compat(a, b) return a ^ b end
|
||||
|
||||
version = semver(version)
|
||||
checks = string.explode(checks, " ", true)
|
||||
for i = 1, #checks do
|
||||
local check = checks[i]
|
||||
local func
|
||||
if check:startswith(">=") then
|
||||
func = ge
|
||||
check = check:sub(3)
|
||||
elseif check:startswith(">") then
|
||||
func = gt
|
||||
check = check:sub(2)
|
||||
elseif check:startswith("<=") then
|
||||
func = le
|
||||
check = check:sub(3)
|
||||
elseif check:startswith("<") then
|
||||
func = lt
|
||||
check = check:sub(2)
|
||||
elseif check:startswith("=") then
|
||||
func = eq
|
||||
check = check:sub(2)
|
||||
elseif check:startswith("^") then
|
||||
func = compat
|
||||
check = check:sub(2)
|
||||
else
|
||||
func = ge
|
||||
end
|
||||
|
||||
check = semver(check)
|
||||
if not func(version, check) then
|
||||
return false
|
||||
end
|
||||
end
|
||||
|
||||
return true
|
||||
end
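-- Usage sketch (illustrative addition):
--
--   p.checkVersion("5.0.3", ">=5.0 <6.0")   --> true
--   p.checkVersion("6.1.0", ">=5.0 <6.0")   --> false
--   p.checkVersion("dev",   ">=5.0")        --> true, with a warning, because
--                                           --  "dev" is not semver comparable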
|
||||
|
||||
|
||||
|
||||
function premake.clearWarnings()
|
||||
_warnings = {}
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Raise an error, with a formatted message built from the provided
|
||||
-- arguments.
|
||||
--
|
||||
-- @param message
|
||||
-- The error message, which may contain string formatting tokens.
|
||||
-- @param ...
|
||||
-- Values to fill in the string formatting tokens.
|
||||
--
|
||||
|
||||
function premake.error(message, ...)
|
||||
error(string.format("** Error: " .. message, ...), 0)
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Finds the correct premake script filename to be run.
|
||||
--
|
||||
-- @param fname
|
||||
-- The filename of the script to run.
|
||||
-- @return
|
||||
-- The correct location and filename of the script to run.
|
||||
--
|
||||
|
||||
function premake.findProjectScript(fname)
|
||||
return os.locate(fname, fname .. ".lua", path.join(fname, "premake5.lua"), path.join(fname, "premake4.lua"))
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- "Immediate If" - returns one of the two values depending on the value
|
||||
-- of the provided condition. Note that both the true and false expressions
|
||||
-- will be evaluated regardless of the condition, even if only one result
|
||||
-- is returned.
|
||||
--
|
||||
-- @param condition
|
||||
-- A boolean condition, determining which value gets returned.
|
||||
-- @param trueValue
|
||||
-- The value to return if the condition is true.
|
||||
-- @param falseValue
|
||||
-- The value to return if the condition is false.
|
||||
-- @return
|
||||
-- One of trueValue or falseValue.
|
||||
---
|
||||
|
||||
function iif(condition, trueValue, falseValue)
|
||||
if condition then
|
||||
return trueValue
|
||||
else
|
||||
return falseValue
|
||||
end
|
||||
end
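-- Usage sketch (illustrative addition). Both expressions are evaluated before
-- iif() runs, so avoid side effects or calls that may fail in either branch:
--
--   local sep = iif(os.ishost("windows"), "\\", "/")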
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Override an existing function with a new implementation; the original
|
||||
-- function is passed as the first argument to the replacement when called.
|
||||
--
|
||||
-- @param scope
|
||||
-- The table containing the function to be overridden. Use _G for
|
||||
-- global functions.
|
||||
-- @param name
|
||||
-- The name of the function to override (a string value).
|
||||
-- @param repl
|
||||
-- The replacement function. The first argument to the function
|
||||
-- will be the original implementation, followed by the arguments
|
||||
-- passed to the original call.
|
||||
---
|
||||
|
||||
function premake.override(scope, name, repl)
|
||||
scope, name = p.resolveAlias(scope, name)
|
||||
|
||||
local original = scope[name]
|
||||
if not original then
|
||||
error("unable to override '" .. name .. "'; no such function", 2)
|
||||
end
|
||||
|
||||
scope[name] = function(...)
|
||||
return repl(original, ...)
|
||||
end
|
||||
|
||||
-- Functions from premake.main are special in that they are fetched
|
||||
-- from an array, which can be modified by system and project scripts,
|
||||
-- instead of a function which would have already been called before
|
||||
-- those scripts could have run. Since the array will have already
|
||||
-- been evaluated by the time override() is called, the new value
|
||||
-- won't be picked up as it would with the function-fetched call
|
||||
-- lists. Special case the workaround for that here so everyone else
|
||||
-- can just override without having to think about the difference.
|
||||
if scope == premake.main then
|
||||
table.replace(premake.main.elements, original, scope[name])
|
||||
end
|
||||
end
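-- Usage sketch (illustrative addition; the scope and function names are
-- hypothetical):
--
--   premake.override(p.myModule, "generate", function(base, prj)
--       print("before generate")
--       base(prj)                 -- call the original implementation
--       print("after generate")
--   end)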
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Find the canonical name and scope of a function, resolving any aliases.
|
||||
--
|
||||
-- @param scope
|
||||
-- The table containing the function to be resolved. Use _G for
|
||||
-- global functions.
|
||||
-- @param name
|
||||
-- The name of the function to resolve.
|
||||
-- @return
|
||||
-- The canonical scope and function name (a string value).
|
||||
---
|
||||
|
||||
function p.resolveAlias(scope, name)
|
||||
local aliases = _aliases[scope]
|
||||
if aliases then
|
||||
while aliases[name] do
|
||||
name = aliases[name]
|
||||
end
|
||||
end
|
||||
return scope, name
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Display a warning, with a formatted message built from the provided
|
||||
-- arguments.
|
||||
--
|
||||
-- @param message
|
||||
-- The warning message, which may contain string formatting tokens.
|
||||
-- @param ...
|
||||
-- Values to fill in the string formatting tokens.
|
||||
--
|
||||
|
||||
function premake.warn(message, ...)
|
||||
message = string.format(message, ...)
|
||||
if _OPTIONS.fatal then
|
||||
error(message)
|
||||
else
|
||||
term.pushColor(term.warningColor)
|
||||
io.stderr:write(string.format("** Warning: " .. message .. "\n", ...))
|
||||
term.popColor();
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Displays a warning just once per run.
|
||||
--
|
||||
-- @param key
|
||||
-- A unique key to identify this warning. Subsequent warning messages
|
||||
-- using the same key will not be shown.
|
||||
-- @param message
|
||||
-- The warning message, which may contain string formatting tokens.
|
||||
-- @param ...
|
||||
-- Values to fill in the string formatting tokens.
|
||||
--
|
||||
|
||||
function premake.warnOnce(key, message, ...)
|
||||
if not _warnings[key] then
|
||||
_warnings[key] = true
|
||||
premake.warn(message, ...)
|
||||
end
|
||||
end
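-- Usage sketch (illustrative addition; the key string is hypothetical):
--
--   premake.warnOnce("old-flag", "flag '%s' is deprecated", "Foo")
--   premake.warnOnce("old-flag", "flag '%s' is deprecated", "Foo")  -- silent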
|
||||
|
||||
|
||||
--
|
||||
-- Display information in the term.infoColor color.
|
||||
--
|
||||
-- @param message
|
||||
-- The info message, which may contain string formatting tokens.
|
||||
-- @param ...
|
||||
-- Values to fill in the string formatting tokens.
|
||||
--
|
||||
|
||||
function premake.info(message, ...)
|
||||
message = string.format(message, ...)
|
||||
term.pushColor(term.infoColor)
|
||||
io.stdout:write(string.format("** Info: " .. message .. "\n", ...))
|
||||
term.popColor();
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- A shortcut for printing formatted output.
|
||||
--
|
||||
|
||||
function printf(msg, ...)
|
||||
print(string.format(msg, ...))
|
||||
end
|
||||
|
||||
--
|
||||
-- A shortcut for printing formatted output in verbose mode.
|
||||
--
|
||||
function verbosef(msg, ...)
|
||||
if _OPTIONS.verbose then
|
||||
print(string.format(msg, ...))
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- make a string from debug.getinfo information.
|
||||
--
|
||||
function filelineinfo(level)
|
||||
local info = debug.getinfo(level+1, "Sl")
|
||||
if info == nil then
|
||||
return nil
|
||||
end
|
||||
if info.what == "C" then
|
||||
return "C function"
|
||||
else
|
||||
local sep = iif(os.ishost('windows'), '\\', '/')
|
||||
return string.format("%s(%d)", path.translate(info.short_src, sep), info.currentline)
|
||||
end
|
||||
end
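-- Usage sketch (illustrative addition): filelineinfo(1) describes the caller,
-- e.g. returning something like "premake5.lua(12)" (hypothetical value).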
|
||||
|
||||
|
||||
---
|
||||
-- Check whether a version string is semver compatible.
|
||||
---
|
||||
|
||||
function premake.isSemVer(version)
|
||||
local sMajor, sMinor, sPatch, sPrereleaseAndBuild = version:match("^(%d+)%.?(%d*)%.?(%d*)(.-)$")
|
||||
return (type(sMajor) == 'string')
|
||||
end
|
274
Src/external_dependencies/openmpt-trunk/include/premake/src/base/action.lua
vendored
Normal file
|
@@ -0,0 +1,274 @@
|
|||
---
|
||||
-- action.lua
|
||||
-- Work with the list of registered actions.
|
||||
-- Copyright (c) 2002-2015 Jason Perkins and the Premake project
|
||||
---
|
||||
|
||||
local p = premake
|
||||
p.action = {}
|
||||
|
||||
local action = p.action
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Process the raw command line arguments from _ARGV to populate
|
||||
-- the _ACTION global and _ARGS table.
|
||||
--
|
||||
|
||||
_ACTION = nil
|
||||
_ARGS = {}
|
||||
|
||||
for i, arg in ipairs(_ARGV) do
|
||||
if not arg:startswith("/") and not arg:startswith("--") then
|
||||
if not _ACTION then
|
||||
_ACTION = arg
|
||||
else
|
||||
table.insert(_ARGS, arg)
|
||||
_ARGS[arg] = arg
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- The list of registered actions. Calls to newaction() will add
|
||||
-- new entries here.
|
||||
--
|
||||
|
||||
action._list = {}
|
||||
|
||||
|
||||
---
|
||||
-- Register a new action.
|
||||
--
|
||||
-- @param act
|
||||
-- The new action object.
|
||||
---
|
||||
|
||||
function action.add(act)
|
||||
-- validate the action object, at least a little bit
|
||||
local missing
|
||||
for _, field in ipairs({"description", "trigger"}) do
|
||||
if not act[field] then
|
||||
missing = field
|
||||
end
|
||||
end
|
||||
|
||||
if missing then
|
||||
local name = act.trigger or ""
|
||||
error(string.format('action "%s" needs a %s', name, missing), 3)
|
||||
end
|
||||
|
||||
if act.os ~= nil then
|
||||
p.warnOnce(act.trigger, "action '" .. act.trigger .. "' sets 'os' field, which is deprecated, use 'targetos' instead.")
|
||||
act.targetos = act.os
|
||||
act.os = nil
|
||||
end
|
||||
|
||||
action._list[act.trigger] = act
|
||||
end
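-- Usage sketch (illustrative addition; user scripts normally reach this via
-- newaction(), and the trigger shown here is hypothetical):
--
--   p.action.add {
--       trigger     = "hello",
--       description = "Print a greeting",
--       execute     = function() print("hello") end,
--   }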
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Initialize an action.
|
||||
--
|
||||
-- @param name
|
||||
-- The name of the action to be initialized.
|
||||
---
|
||||
|
||||
function action.initialize(name)
|
||||
local a = action._list[name]
|
||||
if (a.onInitialize) then
|
||||
a.onInitialize()
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Trigger an action.
|
||||
--
|
||||
-- @param name
|
||||
-- The name of the action to be triggered.
|
||||
---
|
||||
|
||||
function action.call(name)
|
||||
local a = action._list[name]
|
||||
|
||||
if a.onStart then
|
||||
a.onStart()
|
||||
end
|
||||
|
||||
for wks in p.global.eachWorkspace() do
|
||||
local onWorkspace = a.onWorkspace or a.onSolution or a.onsolution
|
||||
if onWorkspace and not wks.external then
|
||||
onWorkspace(wks)
|
||||
end
|
||||
|
||||
for prj in p.workspace.eachproject(wks) do
|
||||
local onProject = a.onProject or a.onproject
|
||||
if onProject and not prj.external then
|
||||
onProject(prj)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
for rule in p.global.eachRule() do
|
||||
local onRule = a.onRule or a.onrule
|
||||
if onRule and not rule.external then
|
||||
onRule(rule)
|
||||
end
|
||||
end
|
||||
|
||||
if a.execute then
|
||||
a.execute()
|
||||
end
|
||||
|
||||
if a.onEnd then
|
||||
a.onEnd()
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Retrieve the current action, as determined by _ACTION.
|
||||
--
|
||||
-- @return
|
||||
-- The current action, or nil if _ACTION is nil or does not match any action.
|
||||
---
|
||||
|
||||
function action.current()
|
||||
return action.get(_ACTION)
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Retrieve an action by name.
|
||||
--
|
||||
-- @param name
|
||||
-- The name of the action to retrieve.
|
||||
-- @returns
|
||||
-- The requested action, or nil if the action does not exist.
|
||||
---
|
||||
|
||||
function action.get(name)
|
||||
return action._list[name]
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Iterator for the list of actions.
|
||||
---
|
||||
|
||||
function action.each()
|
||||
-- sort the list by trigger
|
||||
local keys = { }
|
||||
for _, act in pairs(action._list) do
|
||||
table.insert(keys, act.trigger)
|
||||
end
|
||||
table.sort(keys)
|
||||
|
||||
local i = 0
|
||||
return function()
|
||||
i = i + 1
|
||||
return action._list[keys[i]]
|
||||
end
|
||||
end
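-- Usage sketch (illustrative addition): list registered actions in trigger
-- order using the iterator above.
--
--   for act in p.action.each() do
--       printf("%-18s %s", act.trigger, act.description)
--   end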
|
||||
|
||||
|
||||
---
|
||||
-- Determines if an action makes use of the configuration information
|
||||
-- provided by the project scripts (i.e. it is an exporter) or if it
|
||||
-- simply performs an action regardless of configuration, in which
|
||||
-- case the baking and validation phases can be skipped.
|
||||
---
|
||||
|
||||
function action.isConfigurable(self)
|
||||
if not self then
|
||||
self = action.current() or {}
|
||||
end
|
||||
if self.onWorkspace or self.onSolution or self.onsolution then
|
||||
return true
|
||||
end
|
||||
if self.onProject or self.onproject then
|
||||
return true
|
||||
end
|
||||
return false
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Activates a particular action.
|
||||
--
|
||||
-- @param name
|
||||
-- The name of the action to activate.
|
||||
---
|
||||
|
||||
function action.set(name)
|
||||
_ACTION = name
|
||||
|
||||
-- Some actions imply a particular operating system
|
||||
local act = action.get(name)
|
||||
if act then
|
||||
_TARGET_OS = act.targetos or _TARGET_OS
|
||||
end
|
||||
|
||||
-- Some are implemented in standalone modules
|
||||
if act and act.module then
|
||||
require(act.module)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Determines if an action supports a particular language or target type.
|
||||
--
|
||||
-- @param feature
|
||||
-- The feature to check, either a programming language or a target type.
|
||||
-- @returns
|
||||
-- True if the feature is supported, false otherwise.
|
||||
---
|
||||
|
||||
function action.supports(feature)
|
||||
if not feature then
|
||||
return true
|
||||
end
|
||||
local self = action.current()
|
||||
if not self then
|
||||
return false
|
||||
end
|
||||
|
||||
if not self.valid_languages and not self.valid_kinds then
|
||||
return true
|
||||
end
|
||||
|
||||
if self.valid_languages and table.contains(self.valid_languages, feature) then
|
||||
return true
|
||||
end
|
||||
|
||||
if self.valid_kinds and table.contains(self.valid_kinds, feature) then
|
||||
return true
|
||||
end
|
||||
|
||||
return false
|
||||
end
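-- Usage sketch (illustrative addition; prj is a hypothetical project object):
--
--   if p.action.supports(prj.language) and p.action.supports(prj.kind) then
--       -- the current action can export this project
--   end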
|
||||
|
||||
|
||||
--
|
||||
-- Determines if an action supports a particular configuration.
|
||||
-- @return
|
||||
-- True if the configuration is supported, false otherwise.
|
||||
--
|
||||
function p.action.supportsconfig(action, cfg)
|
||||
if not action then
|
||||
return false
|
||||
end
|
||||
if action.supportsconfig then
|
||||
return action.supportsconfig(cfg)
|
||||
end
|
||||
return true
|
||||
end
|
1205
Src/external_dependencies/openmpt-trunk/include/premake/src/base/api.lua
vendored
Normal file
File diff suppressed because it is too large
614
Src/external_dependencies/openmpt-trunk/include/premake/src/base/config.lua
vendored
Normal file
|
@@ -0,0 +1,614 @@
|
|||
--
|
||||
-- config.lua
|
||||
-- Premake configuration object API
|
||||
-- Copyright (c) 2011-2015 Jason Perkins and the Premake project
|
||||
--
|
||||
|
||||
local p = premake
|
||||
|
||||
p.config = {}
|
||||
|
||||
local project = p.project
|
||||
local config = p.config
|
||||
|
||||
|
||||
---
|
||||
-- Helper function for getlinkinfo() and gettargetinfo(); builds the
|
||||
-- name parts for a configuration, for building or linking.
|
||||
--
|
||||
-- @param cfg
|
||||
-- The configuration object being queried.
|
||||
-- @param kind
|
||||
-- The target kind (SharedLib, StaticLib).
|
||||
-- @param field
|
||||
-- One of "target" or "implib", used to locate the naming information
|
||||
-- in the configuration object (e.g. targetdir, targetname, etc.)
|
||||
-- @return
|
||||
-- A target info object; see one of getlinkinfo() or gettargetinfo()
|
||||
-- for more information.
|
||||
---
|
||||
|
||||
function config.buildtargetinfo(cfg, kind, field)
|
||||
local basedir = cfg.project.location
|
||||
|
||||
local targetdir
|
||||
if cfg.platform then
|
||||
targetdir = path.join(basedir, 'bin', cfg.platform, cfg.buildcfg)
|
||||
else
|
||||
targetdir = path.join(basedir, 'bin', cfg.buildcfg)
|
||||
end
|
||||
|
||||
local directory = cfg[field.."dir"] or cfg.targetdir or targetdir
|
||||
local basename = cfg[field.."name"] or cfg.targetname or cfg.project.name
|
||||
|
||||
local prefix = cfg[field.."prefix"] or cfg.targetprefix or ""
|
||||
local suffix = cfg[field.."suffix"] or cfg.targetsuffix or ""
|
||||
local extension = cfg[field.."extension"] or cfg.targetextension or ""
|
||||
|
||||
local bundlename = ""
|
||||
local bundlepath = ""
|
||||
|
||||
if table.contains(os.getSystemTags(cfg.system), "darwin") and (kind == p.WINDOWEDAPP or (kind == p.SHAREDLIB and cfg.sharedlibtype)) then
|
||||
bundlename = basename .. extension
|
||||
bundlepath = path.join(bundlename, iif(kind == p.SHAREDLIB and cfg.sharedlibtype == "OSXFramework", "Versions/A", "Contents/MacOS"))
|
||||
end
|
||||
|
||||
local info = {}
|
||||
info.directory = directory
|
||||
info.basename = basename .. suffix
|
||||
info.name = prefix .. info.basename .. extension
|
||||
info.extension = extension
|
||||
info.abspath = path.join(directory, info.name)
|
||||
info.fullpath = info.abspath
|
||||
info.bundlename = bundlename
|
||||
info.bundlepath = path.join(directory, bundlepath)
|
||||
info.prefix = prefix
|
||||
info.suffix = suffix
|
||||
return info
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Determine whether the given configuration can meaningfully link
|
||||
-- against the target object.
|
||||
--
|
||||
-- @param cfg
|
||||
-- The configuration to be tested.
|
||||
-- @param target
|
||||
-- The object to test against. This can be a library file name, or a
|
||||
-- configuration from another project.
|
||||
-- @param linkage
|
||||
-- Optional. For languages or environments that support different kinds of
|
||||
-- linking (i.e. Managed/CLR C++, which can link both managed and unmanaged
|
||||
-- libs), which one to return. One of "unmanaged", "managed". If not
|
||||
-- specified, the default for the configuration will be used.
|
||||
-- @return
|
||||
-- True if linking the target into the configuration makes sense.
|
||||
---
|
||||
|
||||
function config.canLink(cfg, target, linkage)
|
||||
|
||||
-- Have I got a project configuration? If so, I've got some checks
|
||||
-- I can do with the extra information
|
||||
|
||||
if type(target) ~= "string" then
|
||||
|
||||
-- Can't link against executables
|
||||
|
||||
if target.kind ~= "SharedLib" and target.kind ~= "StaticLib" then
|
||||
return false
|
||||
end
|
||||
|
||||
-- Can link mixed C++ with native projects
|
||||
|
||||
if cfg.language == "C++" then
|
||||
if cfg.clr == p.ON then
|
||||
return true
|
||||
end
|
||||
end
|
||||
if target.language == "C++" then
|
||||
if target.clr == p.ON then
|
||||
return true
|
||||
end
|
||||
end
|
||||
|
||||
-- Can't link managed and unmanaged projects
|
||||
|
||||
local cfgManaged = project.isdotnet(cfg.project) or (cfg.clr ~= p.OFF)
|
||||
local tgtManaged = project.isdotnet(target.project) or (target.clr ~= p.OFF)
|
||||
return (cfgManaged == tgtManaged)
|
||||
|
||||
end
|
||||
|
||||
-- For now, I assume that everything listed in a .NET project can be
|
||||
-- linked; unmanaged code is simply not supported
|
||||
|
||||
if project.isdotnet(cfg.project) then
|
||||
return true
|
||||
end
|
||||
|
||||
-- In C++ projects, managed dependencies must explicitly include
|
||||
-- the ".dll" extension, to distinguish from unmanaged libraries
|
||||
|
||||
local isManaged = (path.getextension(target) == ".dll")
|
||||
|
||||
-- Unmanaged projects can never link managed assemblies
|
||||
|
||||
if isManaged and cfg.clr == p.OFF then
|
||||
return false
|
||||
end
|
||||
|
||||
-- Only allow this link if it matches the requested linkage
|
||||
|
||||
return (isManaged) == (linkage == "managed")
|
||||
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Determines if this configuration can be linked incrementally.
|
||||
--
|
||||
|
||||
function config.canLinkIncremental(cfg)
|
||||
if cfg.kind == "StaticLib"
|
||||
or config.isOptimizedBuild(cfg)
|
||||
or cfg.flags.NoIncrementalLink then
|
||||
return false
|
||||
end
|
||||
return true
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Check a configuration for a source code file with the specified
|
||||
-- extension. Used for locating special files, such as Windows
|
||||
-- ".def" module definition files.
|
||||
--
|
||||
-- @param cfg
|
||||
-- The configuration object to query.
|
||||
-- @param ext
|
||||
-- The file extension for which to search.
|
||||
-- @return
|
||||
-- The full file name if found, nil otherwise.
|
||||
--
|
||||
|
||||
function config.findfile(cfg, ext)
|
||||
for _, fname in ipairs(cfg.files) do
|
||||
if fname:endswith(ext) then
|
||||
return project.getrelative(cfg.project, fname)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Retrieve linking information for a specific configuration. That is,
|
||||
-- the path information that is required to link against the library
|
||||
-- built by this configuration.
|
||||
--
|
||||
-- @param cfg
|
||||
-- The configuration object to query.
|
||||
-- @return
|
||||
-- A table with these values:
|
||||
-- basename - the target with no directory or file extension
|
||||
-- name - the target name and extension, with no directory
|
||||
-- directory - relative path to the target, with no file name
|
||||
-- extension - the file extension
|
||||
-- prefix - the file name prefix
|
||||
-- suffix - the file name suffix
|
||||
-- fullpath - directory, name, and extension relative to project
|
||||
-- abspath - absolute directory, name, and extension
|
||||
---
|
||||
|
||||
function config.getlinkinfo(cfg)
|
||||
-- if the configuration target is a DLL, and an import library
|
||||
-- is provided, change the kind as import libraries are static.
|
||||
local kind = cfg.kind
|
||||
if project.isnative(cfg.project) then
|
||||
if cfg.system == p.WINDOWS and kind == p.SHAREDLIB and not cfg.flags.NoImportLib then
|
||||
kind = p.STATICLIB
|
||||
end
|
||||
end
|
||||
return config.buildtargetinfo(cfg, kind, "implib")
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Retrieve a list of link targets from a configuration.
|
||||
--
|
||||
-- @param cfg
|
||||
-- The configuration object to query.
|
||||
-- @param kind
|
||||
-- The type of links to retrieve; one of:
|
||||
-- siblings - linkable sibling projects
|
||||
-- system - system (non-sibling) libraries
|
||||
-- dependencies - all sibling dependencies, including non-linkable
|
||||
-- all - return everything
|
||||
-- @param part
|
||||
-- How the link target should be expressed; one of:
|
||||
-- name - the decorated library name with no directory
|
||||
-- basename - the undecorated library name
|
||||
-- directory - just the directory, no name
|
||||
-- fullpath - full path with decorated name
|
||||
-- object - return the project object of the dependency
|
||||
-- Or, a function(original, decorated) can be supplied, in which case it
|
||||
-- will be called for each matching link, providing the original value as
|
||||
-- it was specified in links(), and the decorated value.
|
||||
-- @param linkage
|
||||
-- Optional. For languages or environments that support different kinds of
|
||||
-- linking (i.e. Managed/CLR C++, which can link both managed and unmanaged
|
||||
-- libs), which one to return. One of "unmanaged", "managed". If not
|
||||
-- specified, the default for the configuration will be used.
|
||||
-- @return
|
||||
-- An array containing the requested link target information.
|
||||
--
|
||||
|
||||
function config.getlinks(cfg, kind, part, linkage)
|
||||
local result = {}
|
||||
|
||||
-- If I'm building a list of link directories, include libdirs
|
||||
|
||||
if part == "directory" then
|
||||
table.foreachi(cfg.libdirs, function(dir)
|
||||
table.insert(result, project.getrelative(cfg.project, dir))
|
||||
end)
|
||||
end
|
||||
|
||||
-- Iterate all of the links listed in the configuration and boil
|
||||
-- them down to the requested data set
|
||||
|
||||
for i = 1, #cfg.links do
|
||||
local link = cfg.links[i]
|
||||
local item
|
||||
|
||||
-- Sort the links into "sibling" (is another project in this same
|
||||
-- workspace) and "system" (is not part of this workspace) libraries.
|
||||
|
||||
local prj = p.workspace.findproject(cfg.workspace, link)
|
||||
if prj and kind ~= "system" then
|
||||
|
||||
-- Sibling; is there a matching configuration in this project that
|
||||
-- is compatible with linking to me?
|
||||
|
||||
local prjcfg = project.getconfig(prj, cfg.buildcfg, cfg.platform)
|
||||
if prjcfg and (kind == "dependencies" or config.canLink(cfg, prjcfg)) then
|
||||
|
||||
-- Yes; does the caller want the whole project config or only part?
|
||||
if part == "object" then
|
||||
item = prjcfg
|
||||
else
|
||||
item = project.getrelative(cfg.project, prjcfg.linktarget.fullpath)
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
elseif not prj and (kind == "system" or kind == "all") then
|
||||
|
||||
-- Make sure this library makes sense for the requested linkage; don't
|
||||
-- link managed .DLLs into unmanaged code, etc.
|
||||
|
||||
if config.canLink(cfg, link, linkage) then
|
||||
-- if the target is listed via an explicit path (i.e. not a
|
||||
-- system library or assembly), make it project-relative
|
||||
item = link
|
||||
if item:find("/", nil, true) then
|
||||
item = project.getrelative(cfg.project, item)
|
||||
end
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
-- If this is something I can link against, pull out the requested part
|
||||
-- don't link against myself
|
||||
if item and item ~= cfg then
|
||||
if part == "directory" then
|
||||
item = path.getdirectory(item)
|
||||
if item == "." then
|
||||
item = nil
|
||||
end
|
||||
elseif part == "name" then
|
||||
item = path.getname(item)
|
||||
elseif part == "basename" then
|
||||
item = path.getbasename(item)
|
||||
elseif type(part) == "function" then
|
||||
part(link, item)
|
||||
end
|
||||
end
|
||||
|
||||
-- Add it to the list, skipping duplicates
|
||||
|
||||
if item and not table.contains(result, item) then
|
||||
table.insert(result, item)
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
return result
|
||||
end
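-- Usage sketch (illustrative addition): collect the project-relative paths of
-- the system libraries a configuration links against.
--
--   local syslibs = config.getlinks(cfg, "system", "fullpath")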
|
||||
|
||||
|
||||
--
|
||||
-- Returns the list of sibling target directories
|
||||
--
|
||||
-- @param cfg
|
||||
-- The configuration object to query.
|
||||
-- @return
|
||||
-- Absolute path list
|
||||
--
|
||||
function config.getsiblingtargetdirs(cfg)
|
||||
local paths = {}
|
||||
for _, sibling in ipairs(config.getlinks(cfg, "siblings", "object")) do
|
||||
if (sibling.kind == p.SHAREDLIB) then
|
||||
local p = sibling.linktarget.directory
|
||||
if not (table.contains(paths, p)) then
|
||||
table.insert(paths, p)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return paths
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Determines the correct runtime library for a configuration.
|
||||
--
|
||||
-- @param cfg
|
||||
-- The configuration object to query.
|
||||
-- @return
|
||||
-- A string identifying the runtime library, one of
|
||||
-- StaticDebug, StaticRelease, SharedDebug, SharedRelease.
|
||||
--
|
||||
|
||||
function config.getruntime(cfg)
|
||||
if (not cfg.staticruntime or cfg.staticruntime == "Default") and not cfg.runtime then
|
||||
return nil -- indicate that no runtime was explicitly selected
|
||||
end
|
||||
|
||||
local linkage = iif(cfg.staticruntime == "On", "Static", "Shared") -- assume 'Shared' is default?
|
||||
|
||||
if not cfg.runtime then
|
||||
return linkage .. iif(config.isDebugBuild(cfg), "Debug", "Release")
|
||||
else
|
||||
return linkage .. cfg.runtime
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Retrieve information about a configuration's build target.
|
||||
--
|
||||
-- @param cfg
|
||||
-- The configuration object to query.
|
||||
-- @return
|
||||
-- A table with these values:
|
||||
-- basename - the target with no directory or file extension
|
||||
-- name - the target name and extension, with no directory
|
||||
-- directory - relative path to the target, with no file name
|
||||
-- extension - the file extension
|
||||
-- prefix - the file name prefix
|
||||
-- suffix - the file name suffix
|
||||
-- fullpath - directory, name, and extension, relative to project
|
||||
-- abspath - absolute directory, name, and extension
|
||||
-- bundlepath - the relative path and file name of the bundle
|
||||
--
|
||||
|
||||
function config.gettargetinfo(cfg)
|
||||
return config.buildtargetinfo(cfg, cfg.kind, "target")
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Returns true if any of the files in the provided container pass the
|
||||
-- provided test function.
|
||||
---
|
||||
|
||||
function config.hasFile(self, testfn)
|
||||
local files = self.files
|
||||
for i = 1, #files do
|
||||
if testfn(files[i]) then
|
||||
return true
|
||||
end
|
||||
end
|
||||
return false
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Determine if the specified library or assembly reference should be copied
|
||||
-- to the build's target directory. "Copy Local" is the terminology used by
|
||||
-- Visual Studio C# projects for this feature.
|
||||
--
|
||||
-- @param cfg
|
||||
-- The configuration to query. Can be a project (and will be for C#
|
||||
-- projects).
|
||||
-- @param linkname
|
||||
-- The name of the library or assembly reference to check. This should
|
||||
-- match the name as it was provided in the call to links().
|
||||
-- @param default
|
||||
-- The value to return if the library is not mentioned in any settings.
|
||||
-- @return
|
||||
-- True if the library should be copied local, false otherwise.
|
||||
--
|
||||
|
||||
function config.isCopyLocal(cfg, linkname, default)
|
||||
if cfg.flags.NoCopyLocal then
|
||||
return false
|
||||
end
|
||||
|
||||
if #cfg.copylocal > 0 then
|
||||
return table.contains(cfg.copylocal, linkname)
|
||||
end
|
||||
|
||||
return default
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Determine if a configuration represents a "debug" or "release" build.
|
||||
-- This controls the runtime library selected for Visual Studio builds
|
||||
-- (and might also be useful elsewhere).
|
||||
--
|
||||
|
||||
function config.isDebugBuild(cfg)
|
||||
return cfg.symbols ~= nil and
|
||||
cfg.symbols ~= p.OFF and
|
||||
cfg.symbols ~= "Default" and
|
||||
not config.isOptimizedBuild(cfg)
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Determine if this configuration uses one of the optimize flags.
|
||||
-- Optimized builds get different treatment, such as full linking
|
||||
-- instead of incremental.
|
||||
--
|
||||
|
||||
function config.isOptimizedBuild(cfg)
|
||||
return cfg.optimize ~= nil and cfg.optimize ~= p.OFF and cfg.optimize ~= "Debug"
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Does this configuration's list of links contain the specified
|
||||
-- project? Performs a case-insensitive search for the project's
|
||||
-- name in the configuration's link array.
|
||||
--
|
||||
-- @param cfg
|
||||
-- The configuration to query.
|
||||
-- @param prjName
|
||||
-- The name of the project for which to search.
|
||||
-- @return
|
||||
-- True if the project name is found in the configuration's
|
||||
-- list of links; nil otherwise.
|
||||
--
|
||||
|
||||
function config.linksToProject(cfg, prjName)
|
||||
prjName = prjName:lower()
|
||||
local n = #cfg.links
|
||||
for i = 1,n do
|
||||
if cfg.links[i]:lower() == prjName then
|
||||
return true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Map the values contained in the configuration to an array of flags.
|
||||
--
|
||||
-- @param cfg
|
||||
-- The configuration to map.
|
||||
-- @param mappings
|
||||
-- A mapping from configuration fields and values to flags. See
|
||||
-- the GCC tool interface for examples of these mappings.
|
||||
-- @return
|
||||
-- An array containing the translated flags.
|
||||
--
|
||||
|
||||
function config.mapFlags(cfg, mappings)
|
||||
local flags = {}
|
||||
|
||||
-- Helper function to append replacement values to the result
|
||||
|
||||
local function add(replacement)
|
||||
if type(replacement) == "function" then
|
||||
replacement = replacement(cfg)
|
||||
end
|
||||
table.insertflat(flags, replacement)
|
||||
end
|
||||
|
||||
-- To ensure we get deterministic results that don't change as more keys
|
||||
-- are added to the map, and to open the possibility to controlling the
|
||||
-- application order of flags, use a prioritized list of fields to order
|
||||
-- the mapping, even though it takes a little longer.
|
||||
|
||||
for field in p.field.eachOrdered() do
|
||||
local map = mappings[field.name]
|
||||
if type(map) == "function" then
|
||||
map = map(cfg, mappings)
|
||||
end
|
||||
if map then
|
||||
|
||||
-- Pass each cfg value in the list through the map and append the
|
||||
-- replacement, if any, to the result
|
||||
|
||||
local values = cfg[field.name]
|
||||
if type(values) == "boolean" then
|
||||
values = iif(values, "On", "Off")
|
||||
end
|
||||
if type(values) ~= "table" then
|
||||
values = { values }
|
||||
end
|
||||
|
||||
local foundValue = false
|
||||
table.foreachi(values, function(value)
|
||||
local replacement = map[value]
|
||||
if replacement ~= nil then
|
||||
foundValue = true
|
||||
add(replacement)
|
||||
end
|
||||
end)
|
||||
|
||||
-- If no value was mapped, check to see if the map specifies a
|
||||
-- default value and, if so, push that into the result
|
||||
|
||||
if not foundValue then
|
||||
add(map._)
|
||||
end
|
||||
|
||||
-- Finally, check for "not values", which should be added to the
|
||||
-- result if the corresponding value is not present
|
||||
|
||||
for key, replacement in pairs(map) do
|
||||
if #key > 1 and key:startswith("_") then
|
||||
key = key:sub(2)
|
||||
if values[key] == nil then
|
||||
add(replacement)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
end
|
||||
end
|
||||
|
||||
return flags
|
||||
end
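-- Usage sketch (illustrative addition; this mapping is a made-up fragment, not
-- the real GCC tool mappings):
--
--   local cflags = config.mapFlags(cfg, {
--       optimize = { Off = "-O0", On = "-O2", Size = "-Os", _ = "-O0" },
--       symbols  = { On = "-g" },
--   })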
|
||||
|
||||
|
||||
---
|
||||
-- Returns both a project configuration and a file configuration from a
|
||||
-- configuration argument that could be either.
|
||||
--
|
||||
-- @param cfg
|
||||
-- A project or file configuration object.
|
||||
-- @return
|
||||
-- Both a project configuration and a file configuration. If the input
|
||||
-- argument is a project configuration, the file configuration value is
|
||||
-- returned as nil.
|
||||
---
|
||||
|
||||
function config.normalize(cfg)
|
||||
if cfg and cfg.config ~= nil then
|
||||
return cfg.config, cfg
|
||||
else
|
||||
return cfg, nil
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Return the appropriate toolset adapter for the provided configuration,
|
||||
-- or nil if no toolset is specified. If a specific version was provided,
|
||||
-- returns that as a second argument.
|
||||
---
|
||||
|
||||
function config.toolset(cfg)
|
||||
if cfg.toolset then
|
||||
return p.tools.canonical(cfg.toolset)
|
||||
end
|
||||
end
|
499
Src/external_dependencies/openmpt-trunk/include/premake/src/base/configset.lua
vendored
Normal file
|
@@ -0,0 +1,499 @@
|
|||
--
|
||||
-- base/configset.lua
|
||||
--
|
||||
-- A configuration set manages a collection of fields, which are organized
|
||||
-- into "blocks". Each block stores a set of field-value pairs, along with
|
||||
-- a list of terms which indicate the context in which those field values
|
||||
-- should be applied.
|
||||
--
|
||||
-- Configurations use the field definitions to know what fields are available,
|
||||
-- and the corresponding value types for those fields. Only fields that have
|
||||
-- been registered via field.new() can be stored.
|
||||
--
|
||||
-- TODO: I may roll this functionality up into the container API at some
|
||||
-- point. If you find yourself using or extending this code for your own
|
||||
-- work give me a shout before you go too far with it so we can coordinate.
|
||||
--
|
||||
-- Copyright (c) 2012-2014 Jason Perkins and the Premake project
|
||||
--
|
||||
|
||||
local p = premake
|
||||
|
||||
p.configset = {}
|
||||
|
||||
local configset = p.configset
|
||||
local criteria = p.criteria
|
||||
|
||||
|
||||
--
|
||||
-- Create a new configuration set.
|
||||
--
|
||||
-- @param parent
|
||||
-- An optional parent configuration set. If provided, the parent provides
|
||||
-- a base configuration, which this set will extend.
|
||||
-- @return
|
||||
-- A new, empty configuration set.
|
||||
--
|
||||
|
||||
function configset.new(parent)
|
||||
local cset = {}
|
||||
cset.parent = parent
|
||||
cset.blocks = {}
|
||||
cset.current = nil
|
||||
cset.compiled = false
|
||||
return cset
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Retrieve a value from the configuration set.
|
||||
--
|
||||
-- This and the criteria supporting code are the inner loops of the app. Some
|
||||
-- readability has been sacrificed for overall performance.
|
||||
--
|
||||
-- @param cset
|
||||
-- The configuration set to query.
|
||||
-- @param field
|
||||
-- The definition of field to be queried.
|
||||
-- @param filter
|
||||
-- A list of lowercase context terms to use during the fetch. Only those
|
||||
-- blocks with terms fully contained by this list will be considered in
|
||||
-- determining the returned value. Terms should be lower case to make
|
||||
-- the context filtering case-insensitive.
|
||||
-- @param ctx
|
||||
-- The context that will be used for detoken.expand
|
||||
-- @param origin
|
||||
-- The originating configset if set.
|
||||
-- @return
|
||||
-- The requested value.
|
||||
---
|
||||
|
||||
function configset.fetch(cset, field, filter, ctx, origin)
|
||||
filter = filter or {}
|
||||
ctx = ctx or {}
|
||||
|
||||
if p.field.merges(field) then
|
||||
return configset._fetchMerged(cset, field, filter, ctx, origin)
|
||||
else
|
||||
return configset._fetchDirect(cset, field, filter, ctx, origin)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
function configset._dofilter(cset, block, filter)
|
||||
if not filter.matcher then
|
||||
return (cset.compiled or criteria.matches(block._criteria, filter))
|
||||
else
|
||||
return filter.matcher(cset, block, filter)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
function configset._fetchDirect(cset, field, filter, ctx, origin)
|
||||
-- If the originating configset hasn't been compiled, then the value will still
|
||||
-- be on that configset.
|
||||
if origin and origin ~= cset and not origin.compiled then
|
||||
return configset._fetchDirect(origin, field, filter, ctx, origin)
|
||||
end
|
||||
|
||||
local abspath = filter.files
|
||||
local basedir
|
||||
|
||||
local key = field.name
|
||||
local blocks = cset.blocks
|
||||
local n = #blocks
|
||||
for i = n, 1, -1 do
|
||||
local block = blocks[i]
|
||||
|
||||
if not origin or block._origin == origin then
|
||||
local value = block[key]
|
||||
|
||||
-- If the filter contains a file path, make it relative to
|
||||
-- this block's basedir
|
||||
if value ~= nil and abspath and not cset.compiled and block._basedir and block._basedir ~= basedir then
|
||||
basedir = block._basedir
|
||||
filter.files = path.getrelative(basedir, abspath)
|
||||
end
|
||||
|
||||
if value ~= nil and configset._dofilter(cset, block, filter) then
|
||||
-- If value is an object, return a copy of it so that any
|
||||
-- changes later made to it by the caller won't alter the
|
||||
-- original value (that was a tough bug to find)
|
||||
if type(value) == "table" then
|
||||
value = table.deepcopy(value)
|
||||
end
|
||||
-- Detoken
|
||||
if field.tokens and ctx.environ then
|
||||
value = p.detoken.expand(value, ctx.environ, field, ctx._basedir)
|
||||
end
|
||||
return value
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
filter.files = abspath
|
||||
|
||||
if cset.parent then
|
||||
return configset._fetchDirect(cset.parent, field, filter, ctx, origin)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
function configset._fetchMerged(cset, field, filter, ctx, origin)
|
||||
-- If the originating configset hasn't been compiled, then the value will still
|
||||
-- be on that configset.
|
||||
if origin and origin ~= cset and not origin.compiled then
|
||||
return configset._fetchMerged(origin, field, filter, ctx, origin)
|
||||
end
|
||||
|
||||
local result = {}
|
||||
|
||||
local function remove(patterns)
|
||||
for _, pattern in ipairs(patterns) do
|
||||
-- Detoken
|
||||
if field.tokens and ctx.environ then
|
||||
pattern = p.detoken.expand(pattern, ctx.environ, field, ctx._basedir)
|
||||
end
|
||||
pattern = path.wildcards(pattern):lower()
|
||||
|
||||
local j = 1
|
||||
while j <= #result do
|
||||
local value = result[j]:lower()
|
||||
if value:match(pattern) == value then
|
||||
result[result[j]] = nil
|
||||
table.remove(result, j)
|
||||
else
|
||||
j = j + 1
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
if cset.parent then
|
||||
result = configset._fetchMerged(cset.parent, field, filter, ctx, origin)
|
||||
end
|
||||
|
||||
local abspath = filter.files
|
||||
local basedir
|
||||
|
||||
local key = field.name
|
||||
local blocks = cset.blocks
|
||||
local n = #blocks
|
||||
for i = 1, n do
|
||||
local block = blocks[i]
|
||||
if not origin or block._origin == origin then
|
||||
-- If the filter contains a file path, make it relative to
|
||||
-- this block's basedir
|
||||
if abspath and block._basedir and block._basedir ~= basedir and not cset.compiled then
|
||||
basedir = block._basedir
|
||||
filter.files = path.getrelative(basedir, abspath)
|
||||
end
|
||||
|
||||
if configset._dofilter(cset, block, filter) then
|
||||
if block._removes and block._removes[key] then
|
||||
remove(block._removes[key])
|
||||
end
|
||||
|
||||
local value = block[key]
|
||||
-- If value is an object, return a copy of it so that any
|
||||
-- changes later made to it by the caller won't alter the
|
||||
-- original value (that was a tough bug to find)
|
||||
if type(value) == "table" then
|
||||
value = table.deepcopy(value)
|
||||
end
|
||||
|
||||
if value then
|
||||
-- Detoken
|
||||
if field.tokens and ctx.environ then
|
||||
value = p.detoken.expand(value, ctx.environ, field, ctx._basedir)
|
||||
end
|
||||
-- Translate
|
||||
if field and p.field.translates(field) then
|
||||
value = p.field.translate(field, value)
|
||||
end
|
||||
|
||||
result = p.field.merge(field, result, value)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
filter.files = abspath
|
||||
return result
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Create and return a metatable which allows a configuration set to act as a
|
||||
-- "backing store" for a regular Lua table. Table operations that access a
|
||||
-- registered field will fetch from or store to the configurations set, while
|
||||
-- unknown keys are read from and written to the table normally.
|
||||
---
|
||||
|
||||
function configset.metatable(cset)
|
||||
return {
|
||||
__newindex = function(tbl, key, value)
|
||||
local f = p.field.get(key)
|
||||
if f then
|
||||
local status, err = configset.store(cset, f, value)
|
||||
if err then
|
||||
error(err, 2)
|
||||
end
|
||||
else
|
||||
rawset(tbl, key, value)
|
||||
return value
|
||||
end
|
||||
end,
|
||||
__index = function(tbl, key)
|
||||
local f = p.field.get(key)
|
||||
if f then
|
||||
return configset.fetch(cset, f)
|
||||
else
|
||||
return nil
|
||||
end
|
||||
end
|
||||
}
|
||||
end
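-- Usage sketch (illustrative addition): back a plain table with a configset so
-- that registered fields round-trip through store() and fetch(). The field
-- value shown is hypothetical.
--
--   local cset = p.configset.new()
--   local ctx = setmetatable({}, p.configset.metatable(cset))
--   ctx.targetdir = "build/bin"     -- routed through configset.store()
--   print(ctx.targetdir)            -- routed through configset.fetch()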
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Create a new block of configuration field-value pairs, using a set of
|
||||
-- old-style, non-prefixed context terms to control their application. This
|
||||
-- approach will eventually be phased out in favor of prefixed filters;
|
||||
-- see addFilter() below.
|
||||
--
|
||||
-- @param cset
|
||||
-- The configuration set to hold the new block.
|
||||
-- @param terms
|
||||
-- A set of context terms to control the application of values contained
|
||||
-- in the block.
|
||||
-- @param basedir
|
||||
-- An optional base directory; if set, filename filter tests will be made
|
||||
-- relative to this basis before pattern testing.
|
||||
-- @return
|
||||
-- The new configuration data block.
|
||||
---
|
||||
|
||||
function configset.addblock(cset, terms, basedir)
|
||||
configset.addFilter(cset, terms, basedir, true)
|
||||
return cset.current
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Create a new block of configuration field-value pairs, using a set
|
||||
-- of new-style, prefixed context terms to control their application.
|
||||
--
|
||||
-- @param cset
|
||||
-- The configuration set to hold the new block.
|
||||
-- @param terms
|
||||
-- A set of terms used to control the application of the values
|
||||
-- contained in the block.
|
||||
-- @param basedir
|
||||
-- An optional base directory. If set, filename filter tests will be
|
||||
-- made relative to this base before pattern testing.
|
||||
-- @param unprefixed
|
||||
-- If true, uses the old, unprefixed style for filter terms. This will
|
||||
-- eventually be phased out in favor of prefixed filters.
|
||||
---
|
||||
|
||||
function configset.addFilter(cset, terms, basedir, unprefixed)
|
||||
local crit, err = criteria.new(terms, unprefixed)
|
||||
if not crit then
|
||||
return nil, err
|
||||
end
|
||||
|
||||
local block = {}
|
||||
block._criteria = crit
|
||||
block._origin = cset
|
||||
|
||||
if basedir then
|
||||
block._basedir = basedir:lower()
|
||||
end
|
||||
|
||||
table.insert(cset.blocks, block)
|
||||
cset.current = block
|
||||
return true
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Allow calling code to save and restore a filter. Particularly useful for
|
||||
-- modules.
|
||||
---
|
||||
function configset.getFilter(cset)
|
||||
return {
|
||||
_criteria = cset.current._criteria,
|
||||
_basedir = cset.current._basedir
|
||||
}
|
||||
end
|
||||
|
||||
function configset.setFilter(cset, filter)
|
||||
local block = {}
|
||||
block._criteria = filter._criteria
|
||||
block._basedir = filter._basedir
|
||||
block._origin = cset
|
||||
table.insert(cset.blocks, block)
|
||||
cset.current = block;
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Add a new field-value pair to the current configuration data block. The
|
||||
-- data type of the field is taken into account when adding the values:
|
||||
-- strings are replaced, arrays are merged, etc.
|
||||
--
|
||||
-- @param cset
|
||||
-- The configuration set to hold the new value.
|
||||
-- @param fieldname
|
||||
-- The name of the field being set. The field should have already been
|
||||
-- defined using the api.register() function.
|
||||
-- @param value
|
||||
-- The new value for the field.
|
||||
-- @return
|
||||
-- If successful, returns true. If an error occurred, returns nil and
|
||||
-- an error message.
|
||||
---
|
||||
|
||||
function configset.store(cset, field, value)
|
||||
if not cset.current then
|
||||
configset.addblock(cset, {})
|
||||
end
|
||||
|
||||
local key = field.name
|
||||
local current = cset.current
|
||||
|
||||
local status, result = pcall(function ()
|
||||
current[key] = p.field.store(field, current[key], value)
|
||||
end)
|
||||
|
||||
if not status then
|
||||
if type(result) == "table" then
|
||||
result = result.msg
|
||||
end
|
||||
return nil, result
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Remove values from a configuration set.
|
||||
--
|
||||
-- @param cset
|
||||
-- The configuration set from which to remove.
|
||||
-- @param field
|
||||
-- The field holding the values to be removed.
|
||||
-- @param values
|
||||
-- A list of values to be removed.
|
||||
--
|
||||
|
||||
function configset.remove(cset, field, values)
|
||||
-- removes are always processed first; starting a new block here
|
||||
-- ensures that they will be processed in the proper order
|
||||
local block = {}
|
||||
block._basedir = cset.current._basedir
|
||||
block._criteria = cset.current._criteria
|
||||
block._origin = cset
|
||||
table.insert(cset.blocks, block)
|
||||
cset.current = block
|
||||
|
||||
-- TODO This comment is not completely valid anymore
|
||||
-- This needs work; right now it is hardcoded to only work for lists.
|
||||
-- To support removing from keyed collections, I first need to figure
|
||||
-- out how to move the wildcard():lower() bit into the value
|
||||
-- processing call chain (i.e. that should happen somewhere inside of
|
||||
-- the field.remove() call). And then I will probably need to add
|
||||
-- another accessor to actually do the removing, which right now is
|
||||
-- hardcoded inside of _fetchMerged(). Oh, and some of the logic in
|
||||
-- api.remove() needs to get pushed down to here (or field).
|
||||
|
||||
values = p.field.remove(field, {}, values)
|
||||
|
||||
-- add a list of removed values to the block
|
||||
current = cset.current
|
||||
current._removes = {}
|
||||
current._removes[field.name] = values
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Check to see if a configuration set is empty; that is, it does
|
||||
-- not contain any configuration blocks.
|
||||
--
|
||||
-- @param cset
|
||||
-- The configuration set to query.
|
||||
-- @return
|
||||
-- True if the set does not contain any blocks.
|
||||
--
|
||||
|
||||
function configset.empty(cset)
|
||||
return (#cset.blocks == 0)
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Compiles a new configuration set containing only the blocks which match
|
||||
-- the specified criteria. Fetches against this compiled configuration set
|
||||
-- may omit the context argument, resulting in faster fetches against a
|
||||
-- smaller set of configuration blocks.
|
||||
--
|
||||
-- @param cset
|
||||
-- The configuration set to query.
|
||||
-- @param filter
|
||||
-- A list of lowercase context terms to use during the fetch. Only those
|
||||
-- blocks with terms fully contained by this list will be considered in
|
||||
-- determining the returned value. Terms should be lower case to make
|
||||
-- the context filtering case-insensitive.
|
||||
-- @return
|
||||
-- A new configuration set containing only the selected blocks, and the
|
||||
-- "compiled" field set to true.
|
||||
--
|
||||
|
||||
function configset.compile(cset, filter)
|
||||
-- always start with the parent
|
||||
local result
|
||||
if cset.parent then
|
||||
result = configset.compile(cset.parent, filter)
|
||||
else
|
||||
result = configset.new()
|
||||
end
|
||||
|
||||
local blocks = cset.blocks
|
||||
local n = #blocks
|
||||
|
||||
local abspath = filter.files
|
||||
local basedir
|
||||
|
||||
for i = 1, n do
|
||||
local block = blocks[i]
|
||||
if block._origin == cset then
|
||||
block._origin = result
|
||||
end
|
||||
|
||||
-- If the filter contains a file path, make it relative to
|
||||
-- this block's basedir
|
||||
if abspath and block._basedir and block._basedir ~= basedir then
|
||||
basedir = block._basedir
|
||||
filter.files = path.getrelative(basedir, abspath)
|
||||
end
|
||||
|
||||
if criteria.matches(block._criteria, filter) then
|
||||
table.insert(result.blocks, block)
|
||||
end
|
||||
end
|
||||
|
||||
filter.files = abspath
|
||||
|
||||
result.compiled = true
|
||||
return result
|
||||
end
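-- Illustrative usage sketch (not part of the original source): pre-filtering a
-- set for a fixed list of lowercase terms so later fetches can skip the
-- per-block criteria matching. The term names shown are assumptions.
--
--   local compiled = configset.compile(cset, { configurations = "debug", system = "windows" })
--   -- compiled.compiled == true; fetches against it only consider the matching blocks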
|
361
Src/external_dependencies/openmpt-trunk/include/premake/src/base/container.lua
vendored
Normal file
|
@ -0,0 +1,361 @@
|
|||
---
|
||||
-- container.lua
|
||||
-- Implementation of configuration containers.
|
||||
-- Copyright (c) 2014 Jason Perkins and the Premake project
|
||||
---
|
||||
|
||||
local p = premake
|
||||
p.container = {}
|
||||
|
||||
local container = p.container
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Keep a master dictionary of container classes, so they can be easily looked
|
||||
-- up by name (technically you could look at premake["name"] but that is just
|
||||
-- a coding convention and I don't want to count on it)
|
||||
---
|
||||
|
||||
container.classes = {}
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Define a new class of containers.
|
||||
--
|
||||
-- @param name
|
||||
-- The name of the new container class. Used wherever the class needs to
|
||||
-- be shown to the end user in a readable way.
|
||||
-- @param parent (optional)
|
||||
-- If this class of container is intended to be contained within another,
|
||||
-- the containing class object.
|
||||
-- @param extraScopes (optional)
|
||||
-- Each container can hold fields scoped to itself (by putting the container's
|
||||
-- class name into its scope attribute), or any of the container's children.
|
||||
-- If a container can hold scopes other than these (i.e. "config"), it can
|
||||
-- provide a list of those scopes in this argument.
|
||||
-- @return
|
||||
-- If successful, the new class descriptor object (a table). Otherwise,
|
||||
-- returns nil and an error message.
|
||||
---
|
||||
|
||||
function container.newClass(name, parent, extraScopes)
|
||||
local class = p.configset.new(parent)
|
||||
class.name = name
|
||||
class.pluralName = name:plural()
|
||||
class.containedClasses = {}
|
||||
class.extraScopes = extraScopes
|
||||
|
||||
if parent then
|
||||
table.insert(parent.containedClasses, class)
|
||||
end
|
||||
|
||||
container.classes[name] = class
|
||||
return class
|
||||
end
|
||||
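-- Illustrative sketch (hypothetical class name, not part of the original source):
-- defining a new container class nested inside the workspace class, the same way
-- projects and groups are set up.
--
--   local widget = container.newClass("widget", p.workspace, { "config" })
--   -- widget.pluralName == "widgets"; workspace instances gain a `widgets` collection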
|
||||
|
||||
|
||||
---
|
||||
-- Create a new instance of a configuration container. This is just the
|
||||
-- generic base implementation; each container class will define its
|
||||
-- own version.
|
||||
--
|
||||
-- @param class
|
||||
-- The class of container being instantiated.
|
||||
-- @param name
|
||||
-- The name for the new container instance.
|
||||
-- @return
|
||||
-- A new container instance.
|
||||
---
|
||||
|
||||
function container.new(class, name)
|
||||
local self = p.configset.new()
|
||||
setmetatable(self, p.configset.metatable(self))
|
||||
|
||||
self.class = class
|
||||
self.name = name
|
||||
self.filename = name
|
||||
self.script = _SCRIPT
|
||||
self.basedir = os.getcwd()
|
||||
self.external = false
|
||||
|
||||
for childClass in container.eachChildClass(class) do
|
||||
self[childClass.pluralName] = {}
|
||||
end
|
||||
|
||||
return self
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Add a new child to an existing container instance.
|
||||
--
|
||||
-- @param self
|
||||
-- The container instance to hold the child.
|
||||
-- @param child
|
||||
-- The child container instance.
|
||||
---
|
||||
|
||||
function container.addChild(self, child)
|
||||
local children = self[child.class.pluralName]
|
||||
table.insert(children, child)
|
||||
children[child.name] = child
|
||||
|
||||
child.parent = self
|
||||
child[self.class.name] = self
|
||||
if self.class.alias then
|
||||
child[self.class.alias] = self
|
||||
end
|
||||
end
|
||||
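-- Illustrative sketch (hypothetical instances, reusing the "widget" class from
-- the sketch above): instantiating containers and linking parent and child.
--
--   local wks = container.new(p.workspace, "MyWorkspace")
--   local w = container.new(widget, "Renderer")
--   container.addChild(wks, w)
--   -- wks.widgets[1] == w; wks.widgets["Renderer"] == w; w.workspace == wks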
|
||||
|
||||
|
||||
---
|
||||
-- Process the contents of a container, which were populated by the project
|
||||
-- script, in preparation for doing work on the results, such as exporting
|
||||
-- project files.
|
||||
---
|
||||
|
||||
function container.bake(self)
|
||||
if self._isBaked then
|
||||
return self
|
||||
end
|
||||
self._isBaked = true
|
||||
|
||||
local ctx = p.context.new(self)
|
||||
|
||||
for key, value in pairs(self) do
|
||||
ctx[key] = value
|
||||
end
|
||||
|
||||
local parent = self.parent
|
||||
if parent then
|
||||
ctx[parent.class.name] = parent
|
||||
end
|
||||
|
||||
for class in container.eachChildClass(self.class) do
|
||||
for child in container.eachChild(self, class) do
|
||||
child.parent = ctx
|
||||
child[self.class.name] = ctx
|
||||
end
|
||||
end
|
||||
|
||||
if type(self.class.bake) == "function" then
|
||||
self.class.bake(ctx)
|
||||
end
|
||||
|
||||
return ctx
|
||||
end
|
||||
|
||||
|
||||
function container.bakeChildren(self)
|
||||
for class in container.eachChildClass(self.class) do
|
||||
local children = self[class.pluralName]
|
||||
for i = 1, #children do
|
||||
local ctx = container.bake(children[i])
|
||||
children[i] = ctx
|
||||
children[ctx.name] = ctx
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Returns true if the container can hold any of the specified field scopes.
|
||||
--
|
||||
-- @param class
|
||||
-- The container class to test.
|
||||
-- @param scope
|
||||
-- A scope string (e.g. "project", "config") or an array of scope strings.
|
||||
-- @return
|
||||
-- True if this container can hold any of the specified scopes.
|
||||
---
|
||||
|
||||
function container.classCanContain(class, scope)
|
||||
if type(scope) == "table" then
|
||||
for i = 1, #scope do
|
||||
if container.classCanContain(class, scope[i]) then
|
||||
return true
|
||||
end
|
||||
end
|
||||
return false
|
||||
end
|
||||
|
||||
-- if I have child classes, check with them first, since scopes
|
||||
-- are usually specified for leaf nodes in the hierarchy
|
||||
for child in container.eachChildClass(class) do
|
||||
if (container.classCanContain(child, scope)) then
|
||||
return true
|
||||
end
|
||||
end
|
||||
|
||||
if class.name == scope or class.alias == scope then
|
||||
return true
|
||||
end
|
||||
|
||||
-- is it in my extra scopes list?
|
||||
if class.extraScopes and table.contains(class.extraScopes, scope) then
|
||||
return true
|
||||
end
|
||||
|
||||
return false
|
||||
end
|
||||
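-- Illustrative checks (assuming the stock workspace/project class hierarchy):
--
--   container.classCanContain(p.workspace, "project")  -- true: projects nest in workspaces
--   container.classCanContain(p.project, "workspace")  -- false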
|
||||
|
||||
|
||||
---
|
||||
-- Return true if a container class is or inherits from the
|
||||
-- specified class.
|
||||
--
|
||||
-- @param class
|
||||
-- The container class to be tested.
|
||||
-- @param scope
|
||||
-- The name of the class to be checked against. If the container
|
||||
-- class matches this scope (i.e. class is a project and the
|
||||
-- scope is "project"), or if it is a parent object of it (i.e.
|
||||
-- class is a workspace and scope is "project"), then returns
|
||||
-- true.
|
||||
---
|
||||
|
||||
function container.classIsA(class, scope)
|
||||
while class do
|
||||
if class.name == scope or class.alias == scope then
|
||||
return true
|
||||
end
|
||||
class = class.parent
|
||||
end
|
||||
return false
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Enumerate all of the registered child classes of a specific container class.
|
||||
--
|
||||
-- @param class
|
||||
-- The container class to be enumerated.
|
||||
-- @return
|
||||
-- An iterator function for the container's child classes.
|
||||
---
|
||||
|
||||
function container.eachChildClass(class)
|
||||
local children = class.containedClasses
|
||||
local i = 0
|
||||
return function ()
|
||||
i = i + 1
|
||||
if i <= #children then
|
||||
return children[i]
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Enumerate all of the registered child instances of a specific container.
|
||||
--
|
||||
-- @param self
|
||||
-- The container to be queried.
|
||||
-- @param class
|
||||
-- The class of child containers to be enumerated.
|
||||
-- @return
|
||||
-- An iterator function for the container's child classes.
|
||||
---
|
||||
|
||||
function container.eachChild(self, class)
|
||||
local children = self[class.pluralName]
|
||||
local i = 0
|
||||
return function ()
|
||||
i = i + 1
|
||||
if i <= #children then
|
||||
return children[i]
|
||||
end
|
||||
end
|
||||
end
|
||||
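-- Illustrative iteration sketch (hypothetical workspace instance `wks`):
--
--   for prj in container.eachChild(wks, p.project) do
--       print(prj.name)
--   end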
|
||||
|
||||
|
||||
---
|
||||
-- Retrieve the child container with the specified class and name.
|
||||
--
|
||||
-- @param self
|
||||
-- The container instance to query.
|
||||
-- @param class
|
||||
-- The class of the child container to be fetched.
|
||||
-- @param name
|
||||
-- The name of the child container to be fetched.
|
||||
-- @return
|
||||
-- The child instance if it exists, nil otherwise.
|
||||
---
|
||||
|
||||
function container.getChild(self, class, name)
|
||||
local children = self[class.pluralName]
|
||||
return children[name]
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Retrieve a container class object.
|
||||
--
|
||||
-- @param name
|
||||
-- The name of the container class to retrieve.
|
||||
-- @return
|
||||
-- The container class object if it exists, nil otherwise.
|
||||
---
|
||||
|
||||
function container.getClass(name)
|
||||
return container.classes[name]
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Determine if the container contains a child of the specified class which
|
||||
-- meets the criteria of a testing function.
|
||||
--
|
||||
-- @param self
|
||||
-- The container to be queried.
|
||||
-- @param class
|
||||
-- The class of the child containers to be enumerated.
|
||||
-- @param func
|
||||
-- A function that takes a child container as its only argument, and
|
||||
-- returns true if it meets the selection criteria for the call.
|
||||
-- @return
|
||||
-- True if the test function returns true for any child.
|
||||
---
|
||||
|
||||
function container.hasChild(self, class, func)
|
||||
for child in container.eachChild(self, class) do
|
||||
if func(child) then
|
||||
return true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Call out to the container validation to make sure everything
|
||||
-- is as it should be before handing off to the actions.
|
||||
---
|
||||
|
||||
function container.validate(self)
|
||||
if type(self.class.validate) == "function" then
|
||||
self.class.validate(self)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
function container.validateChildren(self)
|
||||
for class in container.eachChildClass(self.class) do
|
||||
local children = self[class.pluralName]
|
||||
for i = 1, #children do
|
||||
container.validate(children[i])
|
||||
end
|
||||
end
|
||||
end
|
256
Src/external_dependencies/openmpt-trunk/include/premake/src/base/context.lua
vendored
Normal file
|
@ -0,0 +1,256 @@
|
|||
--
|
||||
-- base/context.lua
|
||||
--
|
||||
-- Provide a context for pulling out values from a configuration set. Each
|
||||
-- context has an associated list of terms which constrain the values that
|
||||
-- it will retrieve, e.g. "Windows", "Debug", "x64", and so on.
|
||||
--
|
||||
-- The context also provides caching for the values returned from the set.
|
||||
--
|
||||
-- TODO: I may roll this functionality up into the container API at some
|
||||
-- point. If you find yourself using or extending this code for your own
|
||||
-- work, give me a shout before you go too far with it so we can coordinate.
|
||||
--
|
||||
-- Copyright (c) 2012-2014 Jason Perkins and the Premake project
|
||||
--
|
||||
|
||||
local p = premake
|
||||
|
||||
p.context = {}
|
||||
|
||||
local context = p.context
|
||||
local configset = p.configset
|
||||
|
||||
|
||||
--
|
||||
-- Create a new context object.
|
||||
--
|
||||
-- @param cfgset
|
||||
-- The configuration set to provide the data from this context.
|
||||
-- @param environ
|
||||
-- An optional key-value environment table for token expansion; keys and
|
||||
-- values provided in this table will be available for tokens to use.
|
||||
-- @param filename
|
||||
-- An optional filename, which will limit the fetched results to blocks
|
||||
-- which specifically match the provided name.
|
||||
-- @return
|
||||
-- A new context object.
|
||||
--
|
||||
|
||||
function context.new(cfgset, environ)
|
||||
local ctx = {}
|
||||
ctx._cfgset = cfgset
|
||||
ctx.environ = environ or {}
|
||||
ctx.terms = {}
|
||||
|
||||
-- This base directory is used when expanding path tokens encountered
|
||||
-- in non-path values; such values will be made relative to this directory
|
||||
-- so the resulting projects will only contain relative paths. It is
|
||||
-- expected that the creator of the context will set this value using
|
||||
-- the setbasedir() function.
|
||||
|
||||
ctx._basedir = os.getcwd()
|
||||
|
||||
-- when a missing field is requested, fetch it from my config
|
||||
-- set, and then cache the value for future lookups
|
||||
setmetatable(ctx, context.__mt)
|
||||
|
||||
return ctx
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Create an extended and uncached context based on another context object.
|
||||
--
|
||||
-- @param baseContext
|
||||
-- The base context to extend.
|
||||
-- @param newEnvVars
|
||||
-- An optional key-value environment table for token expansion; keys and
|
||||
-- values provided in this table will be available for tokens to use.
|
||||
-- @return
|
||||
-- A new context object.
|
||||
--
|
||||
|
||||
function context.extent(baseContext, newEnvVars)
|
||||
local ctx = {}
|
||||
ctx._ctx = baseContext
|
||||
ctx.environ = newEnvVars or baseContext.environ
|
||||
ctx.terms = {}
|
||||
ctx._basedir = baseContext._basedir
|
||||
|
||||
setmetatable(ctx, context.__mt_uncached)
|
||||
|
||||
return ctx
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Add a new key-value pair to refine the context filtering.
|
||||
--
|
||||
-- @param ctx
|
||||
-- The context to be filtered.
|
||||
-- @param key
|
||||
-- The new (or an existing) key value.
|
||||
-- @param value
|
||||
-- The filtering value for the key.
|
||||
---
|
||||
|
||||
function context.addFilter(ctx, key, value)
|
||||
if type(value) == "table" then
|
||||
for i = 1, #value do
|
||||
value[i] = tostring(value[i]):lower()
|
||||
end
|
||||
elseif value ~= nil then
|
||||
value = tostring(value):lower()
|
||||
end
|
||||
ctx.terms[key:lower()] = value
|
||||
end
|
||||
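-- Illustrative sketch (hypothetical configuration set and terms): creating a
-- context over a configuration set and narrowing it with filter terms before
-- fetching values through the metatable.
--
--   local ctx = context.new(cset, { wks = myWorkspace })
--   context.addFilter(ctx, "configurations", "Debug")
--   context.addFilter(ctx, "system", "windows")
--   local defines = ctx.defines   -- fetched via __index, then cached on the context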
|
||||
|
||||
|
||||
--
|
||||
-- Copies the list of terms from an existing context.
|
||||
--
|
||||
-- @param ctx
|
||||
-- The context to receive the copied terms.
|
||||
-- @param src
|
||||
-- The context containing the terms to copy.
|
||||
--
|
||||
|
||||
function context.copyFilters(ctx, src)
|
||||
ctx.terms = {}
|
||||
for k,v in pairs(src.terms) do
|
||||
ctx.terms[k] = v
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Merges the list of terms from an existing context.
|
||||
--
|
||||
-- @param ctx
|
||||
-- The context to receive the copied terms.
|
||||
-- @param src
|
||||
-- The context containing the terms to copy.
|
||||
--
|
||||
|
||||
function context.mergeFilters(ctx, src)
|
||||
for k, v in pairs(src.terms) do
|
||||
if k == "tags" then
|
||||
ctx.terms[k] = table.join(ctx.terms[k], v)
|
||||
else
|
||||
ctx.terms[k] = v
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Sets the base directory for path token expansion in non-path fields; such
|
||||
-- values will be made relative to this path.
|
||||
--
|
||||
-- @param ctx
|
||||
-- The context in which to set the value.
|
||||
-- @param basedir
|
||||
-- The new base directory for path token expansion. This should be
|
||||
-- provided as an absolute path. This may be left nil to simply fetch
|
||||
-- the current base directory.
|
||||
-- @return
|
||||
-- The context's base directory.
|
||||
--
|
||||
|
||||
function context.basedir(ctx, basedir)
|
||||
ctx._basedir = basedir or ctx._basedir
|
||||
return ctx._basedir
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Compiles the context for better performance. The list of context terms
|
||||
-- becomes locked down; any subsequent changes are ignored.
|
||||
--
|
||||
-- @param ctx
|
||||
-- The context to compile.
|
||||
--
|
||||
|
||||
function context.compile(ctx)
|
||||
ctx._cfgset = configset.compile(ctx._cfgset, ctx.terms)
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Check to see if a context's underlying configuration set is empty; that
|
||||
-- is, it does not contain any configuration blocks.
|
||||
--
|
||||
-- @param ctx
|
||||
-- The context to query.
|
||||
-- @return
|
||||
-- True if the set does not contain any blocks.
|
||||
--
|
||||
|
||||
function context.empty(ctx)
|
||||
return configset.empty(ctx._cfgset)
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Fetch a value from underlying configuration set.
|
||||
--
|
||||
-- @param ctx
|
||||
-- The context to query.
|
||||
-- @param key
|
||||
-- The property key to query.
|
||||
-- @param onlylocal
|
||||
-- If true, don't combine values from parent contexts.
|
||||
-- @return
|
||||
-- The value of the key, as determined by the configuration set. If
|
||||
-- there is a corresponding Premake field, and the field is enabled
|
||||
-- for tokens, any contained tokens will be expanded.
|
||||
--
|
||||
|
||||
function context.fetchvalue(ctx, key, onlylocal)
|
||||
if not onlylocal then
|
||||
local value = rawget(ctx, key)
|
||||
if value ~= nil then
|
||||
return value
|
||||
end
|
||||
end
|
||||
|
||||
-- The underlying configuration set will only hold registered fields.
|
||||
-- If the requested key doesn't have a corresponding field, it is just
|
||||
-- a regular value to be stored and fetched from the table.
|
||||
|
||||
local field = p.field.get(key)
|
||||
if not field then
|
||||
return nil
|
||||
end
|
||||
|
||||
-- If there is a matching field, then go fetch the aggregated value
|
||||
-- from my configuration set, and then cache it for future lookups.
|
||||
|
||||
local value = configset.fetch(ctx._cfgset, field, ctx.terms, ctx, onlylocal and ctx._cfgset)
|
||||
if value then
|
||||
-- store the result for later lookups
|
||||
rawset(ctx, key, value)
|
||||
end
|
||||
|
||||
return value
|
||||
end
|
||||
|
||||
context.__mt = {
|
||||
__index = context.fetchvalue
|
||||
}
|
||||
|
||||
context.__mt_uncached = {
|
||||
__index = function(ctx, key)
|
||||
local field = p.field.get(key)
|
||||
if not field then
|
||||
return nil
|
||||
end
|
||||
local parent = rawget(ctx, '_ctx')
|
||||
return configset.fetch(parent._cfgset, field, ctx.terms, ctx, nil)
|
||||
end
|
||||
}
|
||||
|
179
Src/external_dependencies/openmpt-trunk/include/premake/src/base/criteria.lua
vendored
Normal file
|
@ -0,0 +1,179 @@
|
|||
--
|
||||
-- criteria.lua
|
||||
--
|
||||
-- Stores a list of criteria terms with support for negation, conjunction,
|
||||
-- and wildcard matches. Provides functions to match these criteria
|
||||
-- against various contexts.
|
||||
--
|
||||
-- Copyright (c) 2012-2015 Jason Perkins and the Premake project
|
||||
--
|
||||
|
||||
local p = premake
|
||||
|
||||
p.criteria = criteria -- criteria namespace is defined in C host
|
||||
local criteria = p.criteria
|
||||
|
||||
|
||||
--
|
||||
-- These prefixes correspond to the context information built by the oven
|
||||
-- during baking. In theory, any field could be used as a filter, but right
|
||||
-- now only these are set.
|
||||
--
|
||||
|
||||
criteria._validPrefixes = {
|
||||
_action = true,
|
||||
action = true,
|
||||
architecture = true,
|
||||
configurations = true,
|
||||
files = true,
|
||||
kind = true,
|
||||
language = true,
|
||||
_options = true,
|
||||
options = true,
|
||||
platforms = true,
|
||||
sharedlibtype = true,
|
||||
system = true,
|
||||
toolset = true,
|
||||
tags = true,
|
||||
host = true,
|
||||
}
|
||||
|
||||
|
||||
--
|
||||
-- Flattens a hierarchy of criteria terms into a single array containing all
|
||||
-- of the values as strings in the form of "term:value1 or value2" etc.
|
||||
--
|
||||
function criteria.flatten(terms)
|
||||
local result = {}
|
||||
|
||||
local function flatten(terms)
|
||||
for key, value in pairs(terms) do
|
||||
if type(key) == "number" then
|
||||
if type(value) == "table" then
|
||||
flatten(value)
|
||||
elseif value then
|
||||
table.insert(result, value)
|
||||
end
|
||||
elseif type(key) == "string" then
|
||||
local word = key .. ":"
|
||||
if type(value) == "table" then
|
||||
local values = table.flatten(value)
|
||||
word = word .. table.concat(values, " or ")
|
||||
else
|
||||
word = word .. value
|
||||
end
|
||||
table.insert(result, word)
|
||||
else
|
||||
error("Unknown key type in terms.")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
flatten(terms)
|
||||
return result
|
||||
end
|
||||
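-- Illustrative sketch of the flattened form (entry order may vary, since the
-- input table is walked with pairs()):
--
--   criteria.flatten({ "Debug", system = { "windows", "linux" } })
--   --> { "Debug", "system:windows or linux" }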
|
||||
---
|
||||
-- Create a new criteria object.
|
||||
--
|
||||
-- @param terms
|
||||
-- A list of criteria terms.
|
||||
-- @param unprefixed
|
||||
-- If true, use the old style, unprefixed filter terms. This will
|
||||
-- eventually be phased out in favor of prefixed terms only.
|
||||
-- @return
|
||||
-- A new criteria object.
|
||||
---
|
||||
|
||||
function criteria.new(terms, unprefixed)
|
||||
terms = criteria.flatten(terms)
|
||||
|
||||
-- Preprocess the list of terms for better performance in matches().
|
||||
-- Each term is replaced with a pattern, with an implied AND between
|
||||
-- them. Each pattern contains one or more words, with an implied OR
|
||||
-- between them. A word may be flagged as negated, or as a wildcard
|
||||
-- pattern, and may have a field prefix associated with it.
|
||||
|
||||
local patterns = {}
|
||||
|
||||
for i, term in ipairs(terms) do
|
||||
term = term:lower()
|
||||
|
||||
local pattern = {}
|
||||
local prefix = iif(unprefixed, nil, "configurations")
|
||||
|
||||
local words = term:explode(" or ")
|
||||
for _, word in ipairs(words) do
|
||||
word, prefix = criteria._word(word, prefix)
|
||||
if prefix and not criteria._validPrefixes[prefix] then
|
||||
return nil, string.format("Invalid field prefix '%s'", prefix)
|
||||
end
|
||||
|
||||
-- check for field value aliases
|
||||
if prefix then
|
||||
local fld = p.field.get(prefix)
|
||||
if fld and fld.aliases then
|
||||
word[1] = fld.aliases[word[1]] or word[1]
|
||||
end
|
||||
end
|
||||
|
||||
table.insert(pattern, word)
|
||||
end
|
||||
|
||||
table.insert(patterns, pattern)
|
||||
end
|
||||
|
||||
-- The matching logic is written in C now for performance; compile
|
||||
-- this collection of patterns to C data structures to make that
|
||||
-- code easier to read and maintain.
|
||||
|
||||
local crit = {}
|
||||
crit.patterns = patterns
|
||||
crit.data = criteria._compile(patterns)
|
||||
crit.terms = terms
|
||||
return crit
|
||||
end
|
||||
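-- Illustrative sketch (hypothetical terms): building a criteria object from
-- prefixed terms and testing it against a lowercase filter context; matching
-- itself happens in the C host via criteria.matches().
--
--   local crit = criteria.new({ "configurations:Debug", "not system:windows" })
--   -- criteria.matches(crit, { configurations = "debug", system = "linux" }) --> true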
|
||||
|
||||
|
||||
function criteria._word(word, prefix)
|
||||
local wildcard
|
||||
local assertion = true
|
||||
|
||||
-- Trim off all "not" and field prefixes and check for wildcards
|
||||
while (true) do
|
||||
if word:startswith("not ") then
|
||||
assertion = not assertion
|
||||
word = word:sub(5)
|
||||
else
|
||||
local i = word:find(":", 1, true)
|
||||
if prefix and i then
|
||||
prefix = word:sub(1, i - 1)
|
||||
word = word:sub(i + 1)
|
||||
else
|
||||
wildcard = (word:find("*", 1, true) ~= nil)
|
||||
if wildcard then
|
||||
word = path.wildcards(word)
|
||||
end
|
||||
break
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return { word, prefix, assertion, wildcard }, prefix
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Add a new prefix to the list of allowed values for filters. Note
|
||||
-- setting a prefix on its own has no effect on the output; a filter
|
||||
-- term must also be set on the corresponding context during baking.
|
||||
--
|
||||
-- @param prefix
|
||||
-- The new prefix to be allowed.
|
||||
---
|
||||
|
||||
function criteria.allowPrefix(prefix)
|
||||
criteria._validPrefixes[prefix:lower()] = true
|
||||
end
|
||||
|
202
Src/external_dependencies/openmpt-trunk/include/premake/src/base/detoken.lua
vendored
Normal file
|
@ -0,0 +1,202 @@
|
|||
--
|
||||
-- detoken.lua
|
||||
--
|
||||
-- Expands tokens.
|
||||
--
|
||||
-- Copyright (c) 2011-2014 Jason Perkins and the Premake project
|
||||
--
|
||||
|
||||
local p = premake
|
||||
p.detoken = {}
|
||||
|
||||
local detoken = p.detoken
|
||||
|
||||
|
||||
--
|
||||
-- Expand tokens in a value.
|
||||
--
|
||||
-- @param value
|
||||
-- The value containing the tokens to be expanded.
|
||||
-- @param environ
|
||||
-- An execution environment for any token expansion. This is a list of
|
||||
-- key-value pairs that will be inserted as global variables into the
|
||||
-- token expansion runtime environment.
|
||||
-- @param field
|
||||
-- The definition of the field which stores the value.
|
||||
-- @param basedir
|
||||
-- If provided, path tokens encountered in non-path fields (where
|
||||
-- field.paths is set to false) will be made relative to this location.
|
||||
-- @return
|
||||
-- The value with any contained tokens expanded.
|
||||
--
|
||||
|
||||
function detoken.expand(value, environ, field, basedir)
|
||||
local function expandtoken(token, e, f)
|
||||
-- fetch the path variable from the action, if needed
|
||||
local varMap = {}
|
||||
if f.pathVars or e.overridePathVars then
|
||||
local action = p.action.current()
|
||||
if action then
|
||||
varMap = action.pathVars or {}
|
||||
end
|
||||
end
|
||||
|
||||
-- fetch the pathVars from the environment.
|
||||
local envMap = e.pathVars or {}
|
||||
|
||||
-- enable access to the global environment
|
||||
setmetatable(e, {__index = _G})
|
||||
|
||||
local isAbs = false
|
||||
local err
|
||||
local result
|
||||
local success
|
||||
|
||||
-- if the token starts with a !, don't try making it relative.
|
||||
local dontMakeRelative = token:startswith('!')
|
||||
if dontMakeRelative then
|
||||
token = token:sub(2, -1)
|
||||
end
|
||||
|
||||
-- If this token is in my path variable mapping tables, replace the
|
||||
-- value with the one from the map. This needs to go here because
|
||||
-- I don't want to make the result relative, but I don't want the
|
||||
-- absolute path handling below.
|
||||
local mapped = envMap[token] or varMap[token]
|
||||
if mapped then
|
||||
err = nil
|
||||
result = mapped
|
||||
if type(result) == "function" then
|
||||
success, result = pcall(result, e)
|
||||
if not success then
|
||||
return nil, result
|
||||
end
|
||||
end
|
||||
|
||||
if (type(result) == "table") then
|
||||
isAbs = result.absolute
|
||||
result = result.token
|
||||
else
|
||||
isAbs = path.isabsolute(result)
|
||||
end
|
||||
else
|
||||
-- convert the token into a function to execute
|
||||
local func
|
||||
func, err = load("return " .. token, nil, 't', e)
|
||||
if not func then
|
||||
return nil, "load error: " .. err
|
||||
end
|
||||
|
||||
-- run it and get the result
|
||||
success, result = pcall(func)
|
||||
if not success then
|
||||
err = result
|
||||
result = nil
|
||||
else
|
||||
err = nil
|
||||
result = result or ""
|
||||
end
|
||||
|
||||
if result ~= nil then
|
||||
-- ensure we got a string.
|
||||
result = tostring(result)
|
||||
|
||||
-- If the result is an absolute path, and it is being inserted into
|
||||
-- a NON-path value, I need to make it relative to the project that
|
||||
-- will contain it. Otherwise I end up with an absolute path in
|
||||
-- the generated project, and it can no longer be moved around.
|
||||
if path.hasdeferredjoin(result) then
|
||||
result = path.resolvedeferredjoin(result)
|
||||
end
|
||||
isAbs = path.isabsolute(result)
|
||||
if isAbs and not f.paths and basedir and not dontMakeRelative then
|
||||
result = path.getrelative(basedir, result)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- If the result is an absolute path, and it is being inserted into
|
||||
-- a path value, place a special marker at the start of it. After
|
||||
-- all results have been processed, I can look for these markers to
|
||||
-- find the last absolute path expanded.
|
||||
--
|
||||
-- Example: the value "/home/user/myprj/%{cfg.objdir}" expands to:
|
||||
-- "/home/user/myprj//home/user/myprj/obj/Debug".
|
||||
--
|
||||
-- By inserting a marker this becomes:
|
||||
-- "/home/user/myprj/[\0]/home/user/myprj/obj/Debug".
|
||||
--
|
||||
-- I can now trim everything before the marker to get the right
|
||||
-- result, which should always be the last absolute path specified:
|
||||
-- "/home/user/myprj/obj/Debug"
|
||||
|
||||
if result ~= nil and isAbs and f.paths then
|
||||
result = "\0" .. result
|
||||
end
|
||||
return result, err
|
||||
end
|
||||
|
||||
local function expandvalue(value, e, f)
|
||||
if type(value) ~= "string" then
|
||||
return value
|
||||
end
|
||||
|
||||
local count
|
||||
repeat
|
||||
value, count = value:gsub("%%{(.-)}", function(token)
|
||||
local result, err = expandtoken(token:gsub("\\", "\\\\"), e, f)
|
||||
if err then
|
||||
error(err .. " in token: " .. token, 0)
|
||||
end
|
||||
if not result then
|
||||
error("Token returned nil, it may not exist: " .. token, 0)
|
||||
end
|
||||
return result
|
||||
end)
|
||||
until count == 0
|
||||
|
||||
-- if a path, look for and split out embedded absolute paths
|
||||
if f.paths then
|
||||
local i, j
|
||||
repeat
|
||||
i, j = value:find("\0")
|
||||
if i then
|
||||
value = value:sub(i + 1)
|
||||
end
|
||||
until not i
|
||||
end
|
||||
return value
|
||||
end
|
||||
|
||||
local expand_cache = {}
|
||||
|
||||
local function recurse(value, e, f)
|
||||
if type(value) == "table" then
|
||||
local res_table = {}
|
||||
|
||||
for k, v in pairs(value) do
|
||||
if tonumber(k) ~= nil then
|
||||
res_table[k] = recurse(v, e, f)
|
||||
else
|
||||
local nk = recurse(k, e, f)
|
||||
res_table[nk] = recurse(v, e, f)
|
||||
end
|
||||
end
|
||||
|
||||
return res_table
|
||||
else
|
||||
local res = expand_cache[value]
|
||||
if res == nil then
|
||||
if type(value) == "string" and path.hasdeferredjoin(value) then
|
||||
value = path.resolvedeferredjoin(value)
|
||||
end
|
||||
res = expandvalue(value, e, f)
|
||||
expand_cache[value] = res
|
||||
end
|
||||
return res
|
||||
end
|
||||
end
|
||||
|
||||
return recurse(value, environ, field or {})
|
||||
end
|
||||
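-- Illustrative sketch (hypothetical field and environment): expanding %{} tokens
-- against an environment table. Real behavior also depends on the registered
-- fields and the current action's path variables.
--
--   local fld = p.field.get("buildmessage") or {}
--   local env = { prj = { name = "MyProject" } }
--   local msg = detoken.expand("Building %{prj.name}...", env, fld)
--   -- msg == "Building MyProject..."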
|
391
Src/external_dependencies/openmpt-trunk/include/premake/src/base/field.lua
vendored
Normal file
|
@ -0,0 +1,391 @@
|
|||
---
|
||||
-- base/field.lua
|
||||
--
|
||||
-- Fields hold a particular bit of information about a configuration, such
|
||||
-- as the language of a project or the list of files it uses. Each field has
|
||||
-- a particular data "kind", which describes the structure of the information
|
||||
-- it holds, such as a simple string or a list of paths.
|
||||
--
|
||||
-- The field.* functions here manage the definition of these fields, and the
|
||||
-- accessor functions required to get, set, remove, and merge their values.
|
||||
--
|
||||
-- Copyright (c) 2014 Jason Perkins and the Premake project
|
||||
---
|
||||
|
||||
local p = premake
|
||||
p.field = {}
|
||||
local field = p.field
|
||||
|
||||
|
||||
-- Lists to hold all of the registered fields and data kinds
|
||||
|
||||
field._list = {}
|
||||
field._loweredList = {}
|
||||
field._sortedList = nil
|
||||
field._kinds = {}
|
||||
|
||||
-- For historical reasons
|
||||
premake.fields = field._list
|
||||
|
||||
-- A cache for data kind accessor functions
|
||||
|
||||
field._accessors = {}
|
||||
|
||||
|
||||
---
|
||||
-- Register a new field.
|
||||
--
|
||||
-- @param f
|
||||
-- A table describing the new field, with these keys:
|
||||
-- name A unique string name for the field, to be used to identify
|
||||
-- the field in future operations.
|
||||
-- kind The kind of values that can be stored into this field. Kinds
|
||||
-- can be chained together to create more complex types, such as
|
||||
-- "list:string".
|
||||
--
|
||||
-- In addition, any custom keys set on the field description will be
|
||||
-- maintained.
|
||||
--
|
||||
-- @return
|
||||
-- A populated field object. Or nil and an error message if the field could
|
||||
-- not be registered.
|
||||
---
|
||||
|
||||
function field.new(f)
|
||||
-- Translate the old approaches to data kind definitions to the new
|
||||
-- one used here. These should probably be deprecated eventually.
|
||||
|
||||
if f.kind:startswith("key-") then
|
||||
f.kind = f.kind:sub(5)
|
||||
f.keyed = true
|
||||
end
|
||||
|
||||
if f.kind:endswith("-list") then
|
||||
f.kind = f.kind:sub(1, -6)
|
||||
f.list = true
|
||||
end
|
||||
|
||||
local kind = f.kind
|
||||
|
||||
if kind == "object" or kind == "array" then
|
||||
kind = "table"
|
||||
end
|
||||
|
||||
if f.list then
|
||||
kind = "list:" .. kind
|
||||
end
|
||||
|
||||
if f.keyed then
|
||||
kind = "keyed:" .. kind
|
||||
end
|
||||
|
||||
-- Store the translated kind with a new name, so legacy add-on code
|
||||
-- can continue to work with the old value.
|
||||
|
||||
f._kind = kind
|
||||
|
||||
-- Make sure scope is always an array; don't overwrite old value
|
||||
if type(f.scope) == "table" then
|
||||
f.scopes = f.scope
|
||||
else
|
||||
f.scopes = { f.scope }
|
||||
end
|
||||
|
||||
-- All fields must have a valid store() function
|
||||
if not field.accessor(f, "store") then
|
||||
return nil, "invalid field kind '" .. f._kind .. "'"
|
||||
end
|
||||
|
||||
field._list[f.name] = f
|
||||
field._loweredList[f.name:lower()] = f
|
||||
field._sortedList = nil
|
||||
|
||||
return f
|
||||
end
|
||||
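-- Illustrative sketch (hypothetical field name): registering a list-of-strings
-- field scoped to configurations. Requires the "list" and "string" kinds to be
-- registered first, as the core API setup does.
--
--   local f, err = field.new {
--       name  = "examplenotes",
--       scope = "config",
--       kind  = "list:string",
--   }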
|
||||
|
||||
|
||||
---
|
||||
-- Remove a previously created field definition.
|
||||
---
|
||||
|
||||
function field.unregister(f)
|
||||
field._list[f.name] = nil
|
||||
field._loweredList[f.name:lower()] = nil
|
||||
field._sortedList = nil
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Returns an iterator for the list of registered fields; the
|
||||
-- ordering of returned results is arbitrary.
|
||||
---
|
||||
|
||||
function field.each()
|
||||
local index
|
||||
return function ()
|
||||
index = next(field._list, index)
|
||||
return field._list[index]
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Returns an iterator for the list of registered fields; the
|
||||
-- results are in a prioritized order, then alphabetized.
|
||||
---
|
||||
|
||||
function field.eachOrdered()
|
||||
if not field._sortedList then
|
||||
-- no priorities yet, just alpha sort
|
||||
local keys = table.keys(field._list)
|
||||
table.sort(keys)
|
||||
|
||||
field._sortedList = {}
|
||||
for i = 1, #keys do
|
||||
field._sortedList[i] = field._list[keys[i]]
|
||||
end
|
||||
end
|
||||
|
||||
local i = 0
|
||||
return function ()
|
||||
i = i + 1
|
||||
return field._sortedList[i]
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Register a new kind of data for field storage.
|
||||
--
|
||||
-- @param tag
|
||||
-- A unique name of the kind; used in the kind string in new field
|
||||
-- definitions (see new(), above).
|
||||
-- @param settings
|
||||
-- A table containing the processor functions for the new kind. If
|
||||
-- nil, no change is made to the current field settings.
|
||||
-- @return
|
||||
-- The settings table for the specified tag.
|
||||
---
|
||||
|
||||
function field.kind(tag, settings)
|
||||
if settings then
|
||||
field._kinds[tag] = settings
|
||||
end
|
||||
return field._kinds[tag]
|
||||
end
|
||||
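-- Illustrative sketch (hypothetical kind): registering a minimal data kind whose
-- store() simply overwrites the current value.
--
--   field.kind("example", {
--       store = function(fld, current, value, processor)
--           return value
--       end,
--       compare = function(fld, a, b)
--           return a == b
--       end,
--   })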
|
||||
|
||||
|
||||
---
|
||||
-- Build an "accessor" function to process incoming values for a field. This
|
||||
-- function should be an interview question.
|
||||
--
|
||||
-- An accessor function takes the form of:
|
||||
--
|
||||
-- function (field, current, value, nextAccessor)
|
||||
--
|
||||
-- It receives the target field, the current value of that field, and the new
|
||||
-- value that has been provided by the project script. It then returns the
|
||||
-- new value for the target field.
|
||||
--
|
||||
-- @param f
|
||||
-- The field for which an accessor should be returned.
|
||||
-- @param method
|
||||
-- The type of accessor function required; currently this should be one of
|
||||
-- "store", "remove", or "merge" though it is possible for add-on modules to
|
||||
-- extend the available methods by implementing appropriate processing
|
||||
-- functions.
|
||||
-- @return
|
||||
-- An accessor function for the field's kind and method. May return nil
|
||||
-- if no processing functions are available for the given method.
|
||||
---
|
||||
|
||||
|
||||
function field.accessor(f, method)
|
||||
-- Prepare a cache for accessors using this method; each encountered
|
||||
-- kind only needs to be fully processed once.
|
||||
|
||||
field._accessors[method] = field._accessors[method] or {}
|
||||
local cache = field._accessors[method]
|
||||
|
||||
-- Helper function recurses over each piece of the field's data kind,
|
||||
-- building an accessor function for each sequence encountered. Results
|
||||
-- cached from earlier calls are reused again.
|
||||
|
||||
local function accessorForKind(kind)
|
||||
-- I'll end up with a kind of "" when I hit the end of the string
|
||||
if kind == "" then
|
||||
return nil
|
||||
end
|
||||
|
||||
-- Have I already cached a result from an earlier call?
|
||||
if cache[kind] then
|
||||
return cache[kind]
|
||||
end
|
||||
|
||||
-- Split off the first piece from the rest of the kind. If the
|
||||
-- incoming kind is "list:key:string", thisKind will be "list"
|
||||
-- and nextKind will be "key:string".
|
||||
|
||||
local thisKind = kind:match('(.-):') or kind
|
||||
local nextKind = kind:sub(#thisKind + 2)
|
||||
|
||||
-- Get the processor function for this kind. Processors perform
|
||||
-- data validation and storage appropriate for the data structure.
|
||||
|
||||
local functions = field._kinds[thisKind]
|
||||
if not functions then
|
||||
return nil, "Invalid field kind '" .. thisKind .. "'"
|
||||
end
|
||||
|
||||
local processor = functions[method]
|
||||
if not processor then
|
||||
return nil
|
||||
end
|
||||
|
||||
-- Now recurse to get the accessor function for the remaining parts
|
||||
-- of the field's data kind. If the kind was "list:key:string", then
|
||||
-- the processor function handles the "list" part, and this function
|
||||
-- takes care of the "key:string" part.
|
||||
|
||||
local nextAccessor = accessorForKind(nextKind)
|
||||
|
||||
-- Now here's the magic: wrap the processor and the next accessor
|
||||
-- up together into a Matryoshka doll of function calls, each call
|
||||
-- handling just its level of the kind.
|
||||
|
||||
local accessor = function(f, current, value)
|
||||
return processor(f, current, value, nextAccessor)
|
||||
end
|
||||
|
||||
-- And cache the result so I don't have to go through that again
|
||||
cache[kind] = accessor
|
||||
return accessor
|
||||
end
|
||||
|
||||
return accessorForKind(f._kind)
|
||||
end
|
||||
|
||||
|
||||
|
||||
function field.compare(f, a, b)
|
||||
local processor = field.accessor(f, "compare")
|
||||
if processor then
|
||||
return processor(f, a, b)
|
||||
else
|
||||
return (a == b)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Fetch a field description by name.
|
||||
---
|
||||
|
||||
function field.get(name)
|
||||
return field._list[name] or field._loweredList[name:lower()]
|
||||
end
|
||||
|
||||
|
||||
|
||||
function field.merge(f, current, value)
|
||||
local processor = field.accessor(f, "merge")
|
||||
if processor then
|
||||
return processor(f, current, value)
|
||||
else
|
||||
return value
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Is this a field that supports merging values together? Non-merging fields
|
||||
-- can simply overwrite their values, merging fields can call merge() to
|
||||
-- combine two values together.
|
||||
---
|
||||
|
||||
function field.merges(f)
|
||||
return (field.accessor(f, "merge") ~= nil)
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Retrieve a property from a field, based on its data kind. Allows extra
|
||||
-- information to be stored along with the data kind definitions; use this
|
||||
-- call to find the first value in the field's data kind chain.
|
||||
---
|
||||
|
||||
function field.property(f, tag)
|
||||
local kinds = string.explode(f._kind, ":", true)
|
||||
for i, kind in ipairs(kinds) do
|
||||
local value = field._kinds[kind][tag]
|
||||
if value ~= nil then
|
||||
return value
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Override one of the field kind accessor functions. This works just like
|
||||
-- p.override(), but applies the new function to the internal field
|
||||
-- description and clears the accessor caches to make sure the change gets
|
||||
-- picked up by future operations.
|
||||
---
|
||||
|
||||
function field.override(fieldName, accessorName, func)
|
||||
local kind = field.kind(fieldName)
|
||||
p.override(kind, accessorName, func)
|
||||
field._accessors = {}
|
||||
end
|
||||
|
||||
|
||||
function field.remove(f, current, value)
|
||||
local processor = field.accessor(f, "remove")
|
||||
if processor then
|
||||
return processor(f, current, value)
|
||||
else
|
||||
return value
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
function field.removes(f)
|
||||
return (field.accessor(f, "merge") ~= nil and field.accessor(f, "remove") ~= nil)
|
||||
end
|
||||
|
||||
|
||||
|
||||
function field.store(f, current, value)
|
||||
local processor = field.accessor(f, "store")
|
||||
if processor then
|
||||
return processor(f, current, value)
|
||||
else
|
||||
return value
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
function field.translate(f, value)
|
||||
local processor = field.accessor(f, "translate")
|
||||
if processor then
|
||||
return processor(f, value, nil)[1]
|
||||
else
|
||||
return value
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
function field.translates(f)
|
||||
return (field.accessor(f, "translate") ~= nil)
|
||||
end
|
||||
|
300
Src/external_dependencies/openmpt-trunk/include/premake/src/base/fileconfig.lua
vendored
Normal file
|
@ -0,0 +1,300 @@
|
|||
--
|
||||
-- fileconfig.lua
|
||||
-- The set of configuration information for a specific file.
|
||||
-- Copyright (c) 2011-2014 Jason Perkins and the Premake project
|
||||
--
|
||||
|
||||
local p = premake
|
||||
p.fileconfig = {}
|
||||
|
||||
local fileconfig = p.fileconfig
|
||||
local context = p.context
|
||||
local project = p.project
|
||||
|
||||
|
||||
--
|
||||
-- A little confusing: the file configuration actually contains two objects.
|
||||
-- The first object, the one that is returned by fileconfig.new() and later
|
||||
-- passed back in as *the* file configuration object, contains the common
|
||||
-- project-wide settings for the file. This object also contains a list of
|
||||
-- "sub-configurations", one for each project configuration to which the file
|
||||
-- belongs.
|
||||
--
|
||||
-- Internally, I'm calling the first object the "file configuration" (fcfg)
|
||||
-- and the children "file sub-configurations" (fsub), to distinguish them
|
||||
-- from the project configurations (cfg).
|
||||
--
|
||||
-- Define metatables for each of these types; more info below.
|
||||
--
|
||||
|
||||
fileconfig.fcfg_mt = {}
|
||||
fileconfig.fsub_mt = {}
|
||||
|
||||
|
||||
--
|
||||
-- Create a new file configuration object.
|
||||
--
|
||||
-- @param fname
|
||||
-- The absolute path to the file.
|
||||
-- @param prj
|
||||
-- The project which contains the file.
|
||||
-- @return
|
||||
-- A new file configuration object.
|
||||
--
|
||||
|
||||
function fileconfig.new(fname, prj)
|
||||
local environ = { }
|
||||
local fcfg = context.new(prj, environ)
|
||||
context.copyFilters(fcfg, prj)
|
||||
context.addFilter(fcfg, "files", fname:lower())
|
||||
|
||||
for key, value in pairs(prj.environ) do
|
||||
environ[key] = value
|
||||
end
|
||||
|
||||
environ.file = fcfg
|
||||
context.compile(fcfg)
|
||||
|
||||
fcfg.project = prj
|
||||
fcfg.workspace = prj.workspace
|
||||
fcfg.configs = {}
|
||||
fcfg.abspath = fname
|
||||
|
||||
context.basedir(fcfg, prj.location)
|
||||
|
||||
-- Most of the other path properties are computed on demand
|
||||
-- from the file's absolute path.
|
||||
|
||||
setmetatable(fcfg, fileconfig.fcfg_mt)
|
||||
|
||||
-- Except for the virtual path, which is expensive to compute, and
|
||||
-- can be used across all the sub-configurations
|
||||
|
||||
local vpath = project.getvpath(prj, fname)
|
||||
if vpath ~= fcfg.abspath then
|
||||
fcfg.vpath = vpath
|
||||
end
|
||||
|
||||
return fcfg
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Associate a new project configuration with a file. It is possible for a
|
||||
-- file to only appear in a subset of a project's configurations.
|
||||
--
|
||||
-- @param fcfg
|
||||
-- The file configuration to which the project configuration should be
|
||||
-- associated.
|
||||
-- @param cfg
|
||||
-- The project configuration to associate.
|
||||
--
|
||||
|
||||
function fileconfig.addconfig(fcfg, cfg)
|
||||
local prj = cfg.project
|
||||
local wks = cfg.workspace
|
||||
|
||||
-- Create a new context object for this configuration-file pairing.
|
||||
-- The context has the ability to pull out configuration settings
|
||||
-- specific to the file.
|
||||
|
||||
local environ = {}
|
||||
local fsub = context.new(prj, environ)
|
||||
context.copyFilters(fsub, fcfg)
|
||||
context.mergeFilters(fsub, cfg)
|
||||
|
||||
fcfg.configs[cfg] = fsub
|
||||
|
||||
-- set up an environment for expanding tokens contained by this file
|
||||
-- configuration; based on the configuration's environment so that
|
||||
-- any magic set up there gets maintained
|
||||
|
||||
for key, value in pairs(cfg.environ) do
|
||||
environ[key] = value
|
||||
end
|
||||
|
||||
for key, value in pairs(fcfg.environ) do
|
||||
environ[key] = value
|
||||
end
|
||||
|
||||
-- finish the setup
|
||||
|
||||
context.compile(fsub)
|
||||
fsub.abspath = fcfg.abspath
|
||||
fsub.vpath = fcfg.vpath
|
||||
fsub.config = cfg
|
||||
fsub.project = prj
|
||||
fsub.workspace = wks
|
||||
|
||||
-- Set the context's base directory to the project's file system
|
||||
-- location. Any path tokens which are expanded in non-path fields
|
||||
-- (such as the custom build commands) will be made relative to
|
||||
-- this path, ensuring a portable generated project.
|
||||
|
||||
context.basedir(fsub, prj.location)
|
||||
|
||||
return setmetatable(fsub, fileconfig.fsub_mt)
|
||||
end
|
||||
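-- Illustrative sketch (hypothetical project `prj` and configuration `cfg`):
-- creating a file configuration and pairing it with a project configuration.
--
--   local fcfg = fileconfig.new("/abs/path/to/src/hello.c", prj)
--   local fsub = fileconfig.addconfig(fcfg, cfg)
--   -- fileconfig.getconfig(fcfg, cfg) == fsub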
|
||||
|
||||
|
||||
--
|
||||
-- Retrieve the configuration settings for a particular file/project
|
||||
-- configuration pairing.
|
||||
--
|
||||
-- @param fcfg
|
||||
-- The file configuration to query.
|
||||
-- @param cfg
|
||||
-- The project configuration to query.
|
||||
-- @return
|
||||
-- The configuration context for the pairing, or nil if this project
|
||||
-- configuration is not associated with this file.
|
||||
--
|
||||
|
||||
function fileconfig.getconfig(fcfg, cfg)
|
||||
return fcfg.configs[cfg]
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Checks to see if the project or file configuration contains a
|
||||
-- custom build rule.
|
||||
--
|
||||
-- @param cfg
|
||||
-- A project or file configuration.
|
||||
-- @return
|
||||
-- True if the configuration contains settings for a custom
|
||||
-- build rule.
|
||||
--
|
||||
|
||||
function fileconfig.hasCustomBuildRule(fcfg)
|
||||
return fcfg and (#fcfg.buildcommands > 0) and (#fcfg.buildoutputs > 0)
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Checks to see if the file configuration contains any unique information,
|
||||
-- or if it is the same as its parent configuration.
|
||||
--
|
||||
-- @param fcfg
|
||||
-- A file configuration.
|
||||
-- @return
|
||||
-- True if the file configuration contains values which differ from the
|
||||
-- parent project configuration, false otherwise.
|
||||
--
|
||||
|
||||
function fileconfig.hasFileSettings(fcfg)
|
||||
if not fcfg then
|
||||
return false
|
||||
end
|
||||
for key, field in pairs(p.fields) do
|
||||
if field.scopes[1] == "config" then
|
||||
local value = fcfg[field.name]
|
||||
if value then
|
||||
if type(value) == "table" then
|
||||
if #value > 0 then
|
||||
return true
|
||||
end
|
||||
else
|
||||
return true
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
return false
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Rather than store pre-computed strings for all of the path variations
|
||||
-- (abspath, relpath, vpath, name, etc.) for each file (there can be quite
|
||||
-- a lot of them) I assign a metatable to the file configuration objects
|
||||
-- that will build these values on the fly.
|
||||
--
|
||||
-- I am using these pseudo-properties, rather than explicit functions, to make
|
||||
-- it easier to fetch them from script tokens (i.e. %{file.relpath}) with no need
|
||||
-- for knowledge of the internal Premake APIs.
|
||||
--
|
||||
|
||||
|
||||
--
|
||||
-- The indexer for the file configurations. If I have a path building function
|
||||
-- to fulfill the request, call it. Else fall back to the context's own value lookups.
|
||||
--
|
||||
|
||||
local fcfg_mt = fileconfig.fcfg_mt
|
||||
|
||||
fcfg_mt.__index = function(fcfg, key)
|
||||
if type(fcfg_mt[key]) == "function" then
|
||||
return fcfg_mt[key](fcfg)
|
||||
end
|
||||
return context.__mt.__index(fcfg, key)
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- The indexer for the file sub-configurations. Check for a path building
|
||||
-- function first, and then fall back to the context's own value lookups.
|
||||
-- TODO: Would be great if this didn't require inside knowledge of context.
|
||||
--
|
||||
|
||||
fileconfig.fsub_mt.__index = function(fsub, key)
|
||||
if type(fcfg_mt[key]) == "function" then
|
||||
return fcfg_mt[key](fsub)
|
||||
end
|
||||
return context.__mt.__index(fsub, key)
|
||||
end
|
||||
|
||||
--
|
||||
-- And here are the path building functions.
|
||||
--
|
||||
|
||||
function fcfg_mt.basename(fcfg)
|
||||
return path.getbasename(fcfg.abspath)
|
||||
end
|
||||
|
||||
|
||||
function fcfg_mt.directory(fcfg)
|
||||
return path.getdirectory(fcfg.abspath)
|
||||
end
|
||||
|
||||
function fcfg_mt.reldirectory(fcfg)
|
||||
return path.getdirectory(fcfg.relpath)
|
||||
end
|
||||
|
||||
function fcfg_mt.name(fcfg)
|
||||
return path.getname(fcfg.abspath)
|
||||
end
|
||||
|
||||
|
||||
function fcfg_mt.objname(fcfg)
|
||||
if fcfg.sequence ~= nil and fcfg.sequence > 0 then
|
||||
return fcfg.basename .. fcfg.sequence
|
||||
else
|
||||
return fcfg.basename
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
function fcfg_mt.path(fcfg)
|
||||
return fcfg.relpath
|
||||
end
|
||||
|
||||
|
||||
function fcfg_mt.relpath(fcfg)
|
||||
return project.getrelative(fcfg.project, fcfg.abspath)
|
||||
end
|
||||
|
||||
|
||||
function fcfg_mt.vpath(fcfg)
|
||||
-- This only gets called if no explicit virtual path was set
|
||||
return fcfg.relpath
|
||||
end
|
||||
|
||||
|
||||
function fcfg_mt.extension(fcfg)
|
||||
return path.getextension(fcfg.abspath)
|
||||
end
|
113
Src/external_dependencies/openmpt-trunk/include/premake/src/base/global.lua
vendored
Normal file
|
@ -0,0 +1,113 @@
|
|||
---
|
||||
-- global.lua
|
||||
-- The global container holds workspaces and rules.
|
||||
-- Copyright (c) 2014-2015 Jason Perkins and the Premake project
|
||||
---
|
||||
|
||||
local p = premake
|
||||
p.global = p.api.container("global")
|
||||
local global = p.global
|
||||
|
||||
|
||||
---
|
||||
-- Create a new global container instance.
|
||||
---
|
||||
|
||||
function global.new(name)
|
||||
return p.container.new(p.global, name)
|
||||
end
|
||||
|
||||
---
|
||||
-- Bakes the global scope.
|
||||
---
|
||||
function global.bake(self)
|
||||
p.container.bakeChildren(self)
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Iterate over the collection of rules in a session.
|
||||
--
|
||||
-- @returns
|
||||
-- An iterator function.
|
||||
---
|
||||
|
||||
function global.eachRule()
|
||||
local root = p.api.rootContainer()
|
||||
return p.container.eachChild(root, p.rule)
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Iterate over the collection of workspaces in a session.
|
||||
--
|
||||
-- @returns
|
||||
-- A workspace iterator function.
|
||||
---
|
||||
|
||||
function global.eachWorkspace()
|
||||
local root = p.api.rootContainer()
|
||||
return p.container.eachChild(root, p.workspace)
|
||||
end
|
||||
|
||||
p.alias(global, "eachWorkspace", "eachSolution")
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Retrieve a rule by name or index.
|
||||
--
|
||||
-- @param key
|
||||
-- The rule key, either a string name or integer index.
|
||||
-- @returns
|
||||
-- The rule with the provided key.
|
||||
---
|
||||
|
||||
function global.getRule(key)
|
||||
local root = p.api.rootContainer()
|
||||
return root.rules[key]
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Retrieve the rule that applies to the provided file name, if any such
|
||||
-- rule exists.
|
||||
--
|
||||
-- @param fname
|
||||
-- The name of the file.
|
||||
-- @param rules
|
||||
-- A list of rule names to be included in the search. If not specified,
|
||||
-- all rules will be checked.
|
||||
-- @returns
|
||||
-- The rule, if one has been registered, or nil.
|
||||
---
|
||||
|
||||
function global.getRuleForFile(fname, rules)
|
||||
for rule in global.eachRule() do
|
||||
if not rules or table.contains(rules, rule.name) then
|
||||
if path.hasextension(fname, rule.fileextension) then
|
||||
return rule
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
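-- Illustrative lookup sketch (hypothetical rule and file name):
--
--   local r = global.getRuleForFile("shaders/blur.hlsl", prj.rules)
--   if r then
--       -- apply the custom build rule's commands for this file
--   end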
|
||||
|
||||
|
||||
---
|
||||
-- Retrieve a workspace by name or index.
|
||||
--
|
||||
-- @param key
|
||||
-- The workspace key, either a string name or integer index.
|
||||
-- @returns
|
||||
-- The workspace with the provided key.
|
||||
---
|
||||
|
||||
function global.getWorkspace(key)
|
||||
local root = p.api.rootContainer()
|
||||
return root.workspaces[key]
|
||||
end
|
||||
|
||||
p.alias(global, "getWorkspace", "getSolution")
|
85
Src/external_dependencies/openmpt-trunk/include/premake/src/base/globals.lua
vendored
Normal file
|
@ -0,0 +1,85 @@
|
|||
--
|
||||
-- globals.lua
|
||||
-- Replacements and extensions to Lua's global functions.
|
||||
-- Copyright (c) 2002-2014 Jason Perkins and the Premake project
|
||||
--
|
||||
|
||||
|
||||
--
|
||||
-- Find and execute a Lua source file present on the filesystem, but
|
||||
-- continue without error if the file is not present. This is used to
|
||||
-- handle optional files such as the premake-system.lua script.
|
||||
--
|
||||
-- @param fname
|
||||
-- The name of the file to load. This may be specified as a single
|
||||
-- file path or an array of file paths, in which case the first
|
||||
-- file found is run.
|
||||
-- @return
|
||||
-- True if a file was found and executed, nil otherwise.
|
||||
--
|
||||
|
||||
function dofileopt(fname)
|
||||
if type(fname) == "string" then fname = {fname} end
|
||||
for i = 1, #fname do
|
||||
local found = os.locate(fname[i])
|
||||
if not found then
|
||||
found = os.locate(fname[i] .. ".lua")
|
||||
end
|
||||
if found then
|
||||
dofile(found)
|
||||
return true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Load and run an external script file, with a bit of extra logic to make
|
||||
-- including projects easier. If the given path is a directory, it will look for
|
||||
-- path/premake5.lua. Each file is tracked and loaded only once.
|
||||
--
|
||||
-- @param fname
|
||||
-- The name of the directory or file to include. If a directory, will
|
||||
-- automatically include the contained premake5.lua or premake4.lua
|
||||
-- script at that location.
|
||||
---
|
||||
|
||||
io._includedFiles = {}
|
||||
|
||||
function include(fname)
|
||||
local fullPath = premake.findProjectScript(fname)
|
||||
fname = fullPath or fname
|
||||
if not io._includedFiles[fname] then
|
||||
io._includedFiles[fname] = true
|
||||
return dofile(fname)
|
||||
end
|
||||
end
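-- Usage sketch: including a directory runs the premake script it contains,
-- and repeated includes are ignored; the path below is hypothetical.
local function _include_example()
	include "src/tools"   -- resolves to src/tools/premake5.lua if that is a directory
	include "src/tools"   -- already tracked in io._includedFiles; does nothing
end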
|
||||
|
||||
|
||||
---
|
||||
-- Extend require() with a second argument to specify the expected
|
||||
-- version of the loaded module. Raises an error if the version criteria
|
||||
-- are not met.
|
||||
--
|
||||
-- @param modname
|
||||
-- The name of the module to load.
|
||||
-- @param versions
|
||||
-- An optional version criteria string; see premake.checkVersion()
|
||||
-- for more information on the format.
|
||||
-- @return
|
||||
-- If successful, the loaded module, which is also stored into the
|
||||
-- global package.loaded table.
|
||||
---
|
||||
|
||||
premake.override(_G, "require", function(base, modname, versions)
|
||||
local result, mod = pcall(base,modname)
|
||||
if not result then
|
||||
error(mod, 3)
|
||||
end
|
||||
if mod and versions and not premake.checkVersion(mod._VERSION, versions) then
|
||||
error(string.format("module %s %s does not meet version criteria %s",
|
||||
modname, mod._VERSION or "(none)", versions), 3)
|
||||
end
|
||||
return mod
|
||||
end)
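-- Usage sketch: request a module together with a version filter; the module
-- name and the criteria string below are hypothetical examples.
local function _require_example()
	local codegen = require("codegen", ">=1.0")
	return codegen
end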
|
22
Src/external_dependencies/openmpt-trunk/include/premake/src/base/group.lua
vendored
Normal file
|
@@ -0,0 +1,22 @@
|
|||
---
|
||||
-- group.lua
|
||||
-- A pseudo-configuration container to represent project groups.
|
||||
-- Copyright (c) 2014 Jason Perkins and the Premake project
|
||||
---
|
||||
|
||||
local p = premake
|
||||
p.group = p.api.container("group", p.workspace)
|
||||
local group = p.group
|
||||
|
||||
|
||||
---
|
||||
-- Bit of a hack: prevent groups from holding any configuration data.
|
||||
---
|
||||
|
||||
group.placeholder = true
|
||||
|
||||
|
||||
|
||||
function group.new(name)
|
||||
return p.container.new(group, name)
|
||||
end
|
81
Src/external_dependencies/openmpt-trunk/include/premake/src/base/help.lua
vendored
Normal file
|
@@ -0,0 +1,81 @@
|
|||
--
|
||||
-- help.lua
|
||||
-- User help, displayed on /help option.
|
||||
-- Copyright (c) 2002-2013 Jason Perkins and the Premake project
|
||||
--
|
||||
|
||||
|
||||
function premake.showhelp()
|
||||
|
||||
-- display the basic usage
|
||||
printf("Premake %s, a build script generator", _PREMAKE_VERSION)
|
||||
printf(_PREMAKE_COPYRIGHT)
|
||||
printf("%s %s", _VERSION, _COPYRIGHT)
|
||||
printf("")
|
||||
printf("Usage: premake5 [options] action [arguments]")
|
||||
printf("")
|
||||
|
||||
-- filter all options by category.
|
||||
local categories = {}
|
||||
for option in premake.option.each() do
|
||||
local cat = "OPTIONS - General"
|
||||
if option.category then
|
||||
cat = "OPTIONS - " .. option.category;
|
||||
end
|
||||
|
||||
if categories[cat] then
|
||||
table.insert(categories[cat], option)
|
||||
else
|
||||
categories[cat] = {option}
|
||||
end
|
||||
end
|
||||
|
||||
-- display all options
|
||||
for k, options in spairs(categories) do
|
||||
printf(k)
|
||||
printf("")
|
||||
|
||||
local length = 0
|
||||
for _, option in ipairs(options) do
|
||||
local trigger = option.trigger
|
||||
if (option.value) then trigger = trigger .. "=" .. option.value end
|
||||
if (#trigger > length) then length = #trigger end
|
||||
end
|
||||
|
||||
for _, option in ipairs(options) do
|
||||
local trigger = option.trigger
|
||||
local description = option.description
|
||||
if (option.value) then trigger = trigger .. "=" .. option.value end
|
||||
if (option.allowed) then description = description .. "; one of:" end
|
||||
|
||||
printf(" --%-" .. length .. "s %s", trigger, description)
|
||||
if (option.allowed) then
|
||||
local function compareValue(a, b)
|
||||
return a[1] < b[1]
|
||||
end
|
||||
table.sort(option.allowed, compareValue)
|
||||
|
||||
for _, value in ipairs(option.allowed) do
|
||||
printf(" %-" .. length-1 .. "s %s", value[1], value[2])
|
||||
end
|
||||
printf("")
|
||||
end
|
||||
end
|
||||
printf("")
|
||||
end
|
||||
|
||||
-- display all actions
|
||||
printf("ACTIONS")
|
||||
printf("")
|
||||
for action in premake.action.each() do
|
||||
printf(" %-17s %s", action.trigger, action.description)
|
||||
end
|
||||
printf("")
|
||||
|
||||
|
||||
-- see more
|
||||
printf("For additional information, see https://premake.github.io")
|
||||
|
||||
end
|
||||
|
||||
|
68
Src/external_dependencies/openmpt-trunk/include/premake/src/base/http.lua
vendored
Normal file
|
@@ -0,0 +1,68 @@
|
|||
--
|
||||
-- http.lua
|
||||
-- Additions to the http namespace.
|
||||
-- Copyright (c) 2008-2014 Jason Perkins and the Premake project
|
||||
--
|
||||
|
||||
if http == nil then
|
||||
return
|
||||
end
|
||||
|
||||
---
|
||||
-- Simple progress bar on stdout for curl downloads.
|
||||
---
|
||||
|
||||
function http.reportProgress(total, current)
|
||||
local width = 70
|
||||
local progress = math.floor(current * width / total)
|
||||
|
||||
if progress == width then
|
||||
io.write(string.rep(' ', width + 2) .. '\r')
|
||||
else
|
||||
io.write('[' .. string.rep('=', progress) .. string.rep(' ', width - progress) .. ']\r')
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Correctly escape parameters for use in a url.
|
||||
---
|
||||
|
||||
function http.escapeUrlParam(param)
|
||||
local url_encodings = {
|
||||
[' '] = '%%20',
|
||||
['!'] = '%%21',
|
||||
['"'] = '%%22',
|
||||
['#'] = '%%23',
|
||||
['$'] = '%%24',
|
||||
['&'] = '%%26',
|
||||
['\''] = '%%27',
|
||||
['('] = '%%28',
|
||||
[')'] = '%%29',
|
||||
['*'] = '%%2A',
|
||||
['+'] = '%%2B',
|
||||
['-'] = '%%2D',
|
||||
['.'] = '%%2E',
|
||||
['/'] = '%%2F',
|
||||
[':'] = '%%3A',
|
||||
[';'] = '%%3B',
|
||||
['<'] = '%%3C',
|
||||
['='] = '%%3D',
|
||||
['>'] = '%%3E',
|
||||
['?'] = '%%3F',
|
||||
['@'] = '%%40',
|
||||
['['] = '%%5B',
|
||||
['\\'] = '%%5C',
|
||||
[']'] = '%%5D',
|
||||
['^'] = '%%5E',
|
||||
['_'] = '%%5F',
|
||||
['`'] = '%%60'
|
||||
}
|
||||
|
||||
param = param:gsub('%%', '%%25')
|
||||
for k,v in pairs(url_encodings) do
|
||||
param = param:gsub('%' .. k, v)
|
||||
end
|
||||
|
||||
return param
|
||||
end
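-- Usage sketch: percent-encode a query value before appending it to a URL;
-- the URL itself is a hypothetical example.
local function _escapeUrlParam_example()
	local q = http.escapeUrlParam("a b&c")   -- yields "a%20b%26c"
	return "https://example.com/search?q=" .. q
end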
|
47
Src/external_dependencies/openmpt-trunk/include/premake/src/base/io.lua
vendored
Normal file
|
@@ -0,0 +1,47 @@
|
|||
--
|
||||
-- io.lua
|
||||
-- Additions to the I/O namespace.
|
||||
-- Copyright (c) 2008-2014 Jason Perkins and the Premake project
|
||||
--
|
||||
|
||||
|
||||
--
|
||||
-- An overload of the io.open() function, which will create any missing
|
||||
-- subdirectories in the filename if "mode" is set to writeable.
|
||||
--
|
||||
|
||||
premake.override(io, "open", function(base, fname, mode)
|
||||
if mode and (mode:find("w") or mode:find("a")) then
|
||||
local dir = path.getdirectory(fname)
|
||||
local ok, err = os.mkdir(dir)
|
||||
if not ok then
|
||||
error(err, 0)
|
||||
end
|
||||
end
|
||||
return base(fname, mode)
|
||||
end)
|
||||
|
||||
|
||||
--
|
||||
-- Write content to a new file.
|
||||
--
|
||||
function io.writefile(filename, content)
|
||||
local file = io.open(filename, "w+b")
|
||||
if file then
|
||||
file:write(content)
|
||||
file:close()
|
||||
return true
|
||||
end
|
||||
end
|
||||
|
||||
--
|
||||
-- Read content from a file.
|
||||
--
|
||||
function io.readfile(filename)
|
||||
local file = io.open(filename, "rb")
|
||||
if file then
|
||||
local content = file:read("*a")
|
||||
file:close()
|
||||
return content
|
||||
end
|
||||
end
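-- Usage sketch: round-trip a small file through the helpers above; the path
-- is hypothetical, and any missing directories are created by the io.open
-- override at the top of this file.
local function _readwrite_example()
	io.writefile("build/notes.txt", "generated text\n")
	return io.readfile("build/notes.txt")   -- "generated text\n"
end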
|
1548
Src/external_dependencies/openmpt-trunk/include/premake/src/base/json.lua
vendored
Normal file
File diff suppressed because it is too large
59
Src/external_dependencies/openmpt-trunk/include/premake/src/base/jsonwrapper.lua
vendored
Normal file
|
@@ -0,0 +1,59 @@
|
|||
--
|
||||
-- jsonwrapper.lua
|
||||
-- Provides JSON encoding and decoding API by wrapping a third-party JSON library
|
||||
-- Copyright (c) 2017 Jason Perkins and the Premake project
|
||||
--
|
||||
|
||||
json = {}
|
||||
|
||||
local implementation = dofile('json.lua')
|
||||
local err
|
||||
json.implementation = implementation
|
||||
|
||||
function implementation.assert(condition, message)
|
||||
if not condition then
|
||||
err = message
|
||||
end
|
||||
|
||||
-- The JSON library we're using assumes that encode error handlers will
|
||||
-- abort on error. It doesn't have the same assumption for decode error
|
||||
-- handlers, but we're using this same function for both.
|
||||
|
||||
assert(condition, message)
|
||||
end
|
||||
|
||||
function json.encode(value)
|
||||
err = nil
|
||||
|
||||
local success, result = pcall(implementation.encode, implementation, value)
|
||||
|
||||
if not success then
|
||||
return nil, err
|
||||
end
|
||||
|
||||
return result
|
||||
end
|
||||
|
||||
function json.encode_pretty(value)
|
||||
err = nil
|
||||
|
||||
local success, result = pcall(implementation.encode_pretty, implementation, value)
|
||||
|
||||
if not success then
|
||||
return nil, err
|
||||
end
|
||||
|
||||
return result
|
||||
end
|
||||
|
||||
function json.decode(value)
|
||||
err = nil
|
||||
|
||||
local success, result = pcall(implementation.decode, implementation, value)
|
||||
|
||||
if not success then
|
||||
return nil, err
|
||||
end
|
||||
|
||||
return result
|
||||
end
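-- Usage sketch: encode and decode a table; on failure the second return
-- value carries the message captured by the assert hook above.
local function _json_example()
	local text, err = json.encode({ name = "example", version = 5 })
	if not text then
		return nil, err
	end
	return json.decode(text)
end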
|
24
Src/external_dependencies/openmpt-trunk/include/premake/src/base/languages.lua
vendored
Normal file
|
@@ -0,0 +1,24 @@
|
|||
---
|
||||
-- languages.lua
|
||||
-- Language helpers.
|
||||
-- Copyright (c) 2002-2015 Jason Perkins and the Premake project
|
||||
---
|
||||
|
||||
local p = premake
|
||||
p.languages = {}
|
||||
|
||||
function p.languages.isc(value)
|
||||
return value == "C";
|
||||
end
|
||||
|
||||
function p.languages.iscpp(value)
|
||||
return value == "C++";
|
||||
end
|
||||
|
||||
function p.languages.iscsharp(value)
|
||||
return value == "C#";
|
||||
end
|
||||
|
||||
function p.languages.isfsharp(value)
|
||||
return value == "F#";
|
||||
end
|
174
Src/external_dependencies/openmpt-trunk/include/premake/src/base/option.lua
vendored
Normal file
|
@@ -0,0 +1,174 @@
|
|||
--
|
||||
-- option.lua
|
||||
-- Work with the list of registered options.
|
||||
-- Copyright (c) 2002-2014 Jason Perkins and the Premake project
|
||||
--
|
||||
|
||||
local p = premake
|
||||
p.option = {}
|
||||
local m = p.option
|
||||
|
||||
|
||||
--
|
||||
-- We can't control how people will type in the command line arguments, or how
|
||||
-- project scripts will define their custom options, so case becomes an issue.
|
||||
-- To minimize issues, set up the _OPTIONS table to always use lowercase keys.
|
||||
--
|
||||
|
||||
local _OPTIONS_metatable = {
|
||||
__index = function(tbl, key)
|
||||
if type(key) == "string" then
|
||||
key = key:lower()
|
||||
end
|
||||
return rawget(tbl, key)
|
||||
end,
|
||||
__newindex = function(tbl, key, value)
|
||||
if type(key) == "string" then
|
||||
key = key:lower()
|
||||
end
|
||||
rawset(tbl, key, value)
|
||||
end
|
||||
}
|
||||
|
||||
_OPTIONS = {}
|
||||
setmetatable(_OPTIONS, _OPTIONS_metatable)
|
||||
|
||||
|
||||
--
|
||||
-- Process the raw command line arguments from _ARGV to populate
|
||||
-- the _OPTIONS table
|
||||
--
|
||||
|
||||
for i, arg in ipairs(_ARGV) do
|
||||
local key, value
|
||||
local i = arg:find("=", 1, true)
|
||||
if i then
|
||||
key = arg:sub(1, i - 1)
|
||||
value = arg:sub(i + 1)
|
||||
else
|
||||
key = arg
|
||||
value = ""
|
||||
end
|
||||
|
||||
if key:startswith("/") then
|
||||
_OPTIONS[key:sub(2)] = value
|
||||
elseif key:startswith("--") then
|
||||
_OPTIONS[key:sub(3)] = value
|
||||
end
|
||||
end
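-- Usage sketch: the metatable above makes option keys case-insensitive, so
-- a value stored under a mixed-case key is read back via its lowercase form.
local function _options_case_example()
	_OPTIONS["TargetDir"] = "bin"    -- stored under "targetdir"
	return _OPTIONS["targetdir"]     -- "bin"
end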
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- The list of registered options. Calls to newoption() will add
|
||||
-- new entries here.
|
||||
--
|
||||
|
||||
m.list = {}
|
||||
|
||||
|
||||
--
|
||||
-- Register a new option.
|
||||
--
|
||||
-- @param opt
|
||||
-- The new option object.
|
||||
--
|
||||
|
||||
function m.add(opt)
|
||||
-- some sanity checking
|
||||
local missing
|
||||
for _, field in ipairs({ "description", "trigger" }) do
|
||||
if (not opt[field]) then
|
||||
missing = field
|
||||
end
|
||||
end
|
||||
|
||||
if (missing) then
|
||||
error("option needs a " .. missing, 3)
|
||||
end
|
||||
|
||||
-- add it to the master list
|
||||
p.option.list[opt.trigger:lower()] = opt
|
||||
|
||||
-- if it has a default value, set it.
|
||||
if opt.default and not _OPTIONS[opt.trigger] then
|
||||
_OPTIONS[opt.trigger] = opt.default
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Retrieve an option by name.
|
||||
--
|
||||
-- @param name
|
||||
-- The name of the option to retrieve.
|
||||
-- @returns
|
||||
-- The requested option, or nil if the option does not exist.
|
||||
--
|
||||
|
||||
function m.get(name)
|
||||
return p.option.list[name]
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Iterator for the list of options.
|
||||
--
|
||||
|
||||
function m.each()
|
||||
-- sort the list by trigger
|
||||
local keys = { }
|
||||
for _, option in pairs(p.option.list) do
|
||||
table.insert(keys, option.trigger)
|
||||
end
|
||||
table.sort(keys)
|
||||
|
||||
local i = 0
|
||||
return function()
|
||||
i = i + 1
|
||||
return p.option.list[keys[i]]
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Validate a list of user supplied key/value pairs against the list of registered options.
|
||||
--
|
||||
-- @param values
|
||||
-- The list of user supplied key/value pairs.
|
||||
-- @returns
|
||||
--- True if the list of pairs are valid, false and an error message otherwise.
|
||||
--
|
||||
|
||||
function m.validate(values)
|
||||
for key, value in pairs(values) do
|
||||
-- does this option exist
|
||||
local opt = p.option.get(key)
|
||||
if (not opt) then
|
||||
return false, "invalid option '" .. key .. "'"
|
||||
end
|
||||
|
||||
-- does it need a value?
|
||||
if (opt.value and value == "") then
|
||||
return false, "no value specified for option '" .. key .. "'"
|
||||
end
|
||||
|
||||
-- is the value allowed?
|
||||
if opt.allowed then
|
||||
local found = false
|
||||
for _, match in ipairs(opt.allowed) do
|
||||
if match[1] == value then
|
||||
found = true
|
||||
break
|
||||
end
|
||||
end
|
||||
if not found then
|
||||
return false, string.format("invalid value '%s' for option '%s'", value, key)
|
||||
end
|
||||
end
|
||||
end
|
||||
return true
|
||||
end
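-- Usage sketch: register a hypothetical option and validate a user-supplied
-- value against it, mirroring what the startup code does with _OPTIONS.
local function _option_example()
	m.add {
		trigger     = "renderer",
		value       = "API",
		description = "Choose a rendering backend",
		allowed     = { { "gl", "OpenGL" }, { "vk", "Vulkan" } }
	}
	return m.validate({ renderer = "gl" })   -- true
end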
|
809
Src/external_dependencies/openmpt-trunk/include/premake/src/base/os.lua
vendored
Normal file
|
@@ -0,0 +1,809 @@
|
|||
--
|
||||
-- os.lua
|
||||
-- Additions to the OS namespace.
|
||||
-- Copyright (c) 2002-2014 Jason Perkins and the Premake project
|
||||
--
|
||||
|
||||
|
||||
---
|
||||
-- Extend Lua's built-in os.execute() with token expansion and
|
||||
-- path normalization.
|
||||
--
|
||||
|
||||
premake.override(os, "execute", function(base, cmd)
|
||||
cmd = os.translateCommands(cmd)
|
||||
return base(cmd)
|
||||
end)
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Same as os.execute(), but accepts string formatting arguments.
|
||||
---
|
||||
|
||||
function os.executef(cmd, ...)
|
||||
cmd = string.format(cmd, ...)
|
||||
return os.execute(cmd)
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Scan the well-known system locations for a particular library.
|
||||
--
|
||||
|
||||
local function parse_ld_so_conf(conf_file)
|
||||
-- Linux ldconfig file parser to find system library locations
|
||||
local first, last
|
||||
local dirs = { }
|
||||
for line in io.lines(conf_file) do
|
||||
-- ignore comments
|
||||
first = line:find("#", 1, true)
|
||||
if first ~= nil then
|
||||
line = line:sub(1, first - 1)
|
||||
end
|
||||
|
||||
if line ~= "" then
|
||||
-- check for include files
|
||||
first, last = line:find("include%s+")
|
||||
if first ~= nil then
|
||||
-- found include glob
|
||||
local include_glob = line:sub(last + 1)
|
||||
local includes = os.matchfiles(include_glob)
|
||||
for _, v in ipairs(includes) do
|
||||
dirs = table.join(dirs, parse_ld_so_conf(v))
|
||||
end
|
||||
else
|
||||
-- found an actual ld path entry
|
||||
table.insert(dirs, line)
|
||||
end
|
||||
end
|
||||
end
|
||||
return dirs
|
||||
end
|
||||
|
||||
local function get_library_search_path()
|
||||
local path
|
||||
if os.istarget("windows") then
|
||||
path = os.getenv("PATH") or ""
|
||||
elseif os.istarget("haiku") then
|
||||
path = os.getenv("LIBRARY_PATH") or ""
|
||||
else
|
||||
if os.istarget("darwin") then
|
||||
path = os.getenv("DYLD_LIBRARY_PATH") or ""
|
||||
else
|
||||
path = os.getenv("LD_LIBRARY_PATH") or ""
|
||||
|
||||
for _, prefix in ipairs({"", "/opt"}) do
|
||||
local conf_file = prefix .. "/etc/ld.so.conf"
|
||||
if os.isfile(conf_file) then
|
||||
for _, v in ipairs(parse_ld_so_conf(conf_file)) do
|
||||
if (#path > 0) then
|
||||
path = path .. ":" .. v
|
||||
else
|
||||
path = v
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
path = path or ""
|
||||
local archpath = "/lib:/usr/lib:/usr/local/lib"
|
||||
if os.is64bit() and not (os.istarget("darwin")) then
|
||||
archpath = "/lib64:/usr/lib64/:usr/local/lib64" .. ":" .. archpath
|
||||
end
|
||||
if (#path > 0) then
|
||||
path = path .. ":" .. archpath
|
||||
else
|
||||
path = archpath
|
||||
end
|
||||
end
|
||||
|
||||
return path
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Attempt to locate and return the path to a shared library.
|
||||
--
|
||||
-- This function does not work to locate system libraries on macOS 11 or later; it may still
|
||||
-- be used to locate user libraries: _"New in macOS Big Sur 11.0.1, the system ships with
|
||||
-- a built-in dynamic linker cache of all system-provided libraries. As part of this change,
|
||||
-- copies of dynamic libraries are no longer present on the filesystem. Code that attempts to
|
||||
-- check for dynamic library presence by looking for a file at a path or enumerating a directory
|
||||
-- will fail."
|
||||
-- https://developer.apple.com/documentation/macos-release-notes/macos-big-sur-11_0_1-release-notes
|
||||
--
|
||||
-- @param libname
|
||||
-- The library name with or without prefix and suffix.
|
||||
-- @param libdirs
|
||||
-- An array of paths to be searched.
|
||||
-- @returns
|
||||
-- The full path to the library if found; `nil` otherwise.
|
||||
---
|
||||
function os.findlib(libname, libdirs)
|
||||
local path = get_library_search_path()
|
||||
local formats
|
||||
|
||||
-- assemble a search path, depending on the platform
|
||||
if os.istarget("windows") then
|
||||
formats = { "%s.dll", "%s" }
|
||||
elseif os.istarget("haiku") then
|
||||
formats = { "lib%s.so", "%s.so" }
|
||||
else
|
||||
if os.istarget("darwin") then
|
||||
formats = { "lib%s.dylib", "%s.dylib" }
|
||||
else
|
||||
formats = { "lib%s.so", "%s.so" }
|
||||
end
|
||||
|
||||
table.insert(formats, "%s")
|
||||
end
|
||||
|
||||
local userpath = ""
|
||||
|
||||
if type(libdirs) == "string" then
|
||||
userpath = libdirs
|
||||
elseif type(libdirs) == "table" then
|
||||
userpath = table.implode(libdirs, "", "", ":")
|
||||
end
|
||||
|
||||
if (#userpath > 0) then
|
||||
if (#path > 0) then
|
||||
path = userpath .. ":" .. path
|
||||
else
|
||||
path = userpath
|
||||
end
|
||||
end
|
||||
|
||||
for _, fmt in ipairs(formats) do
|
||||
local name = string.format(fmt, libname)
|
||||
local result = os.pathsearch(name, path)
|
||||
if result then return result end
|
||||
end
|
||||
end
|
||||
|
||||
function os.findheader(headerpath, headerdirs)
|
||||
-- headerpath: a partial header file path
|
||||
-- headerdirs: additional header search paths
|
||||
|
||||
local path = get_library_search_path()
|
||||
|
||||
-- replace all /lib by /include
|
||||
path = path .. ':'
|
||||
path = path:gsub ('/lib[0-9]*([:/])', '/include%1')
|
||||
path = path:sub (1, #path - 1)
|
||||
|
||||
local userpath = ""
|
||||
|
||||
if type(headerdirs) == "string" then
|
||||
userpath = headerdirs
|
||||
elseif type(headerdirs) == "table" then
|
||||
userpath = table.implode(headerdirs, "", "", ":")
|
||||
end
|
||||
|
||||
if (#userpath > 0) then
|
||||
if (#path > 0) then
|
||||
path = userpath .. ":" .. path
|
||||
else
|
||||
path = userpath
|
||||
end
|
||||
end
|
||||
|
||||
local result = os.pathsearch (headerpath, path)
|
||||
return result
|
||||
end
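-- Usage sketch: look up a library and a header on the search paths built
-- above; "z" and "zlib.h" are examples and resolve only if installed.
local function _find_example()
	local lib = os.findlib("z")          -- search result, or nil if not installed
	local hdr = os.findheader("zlib.h")
	return lib, hdr
end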
|
||||
|
||||
--
|
||||
-- Retrieve the current target operating system ID string.
|
||||
--
|
||||
|
||||
function os.target()
|
||||
return _OPTIONS.os or _TARGET_OS
|
||||
end
|
||||
|
||||
function os.get()
|
||||
local caller = filelineinfo(2)
|
||||
premake.warnOnce(caller, "os.get() is deprecated, use 'os.target()' or 'os.host()'.\n @%s\n", caller)
|
||||
return os.target()
|
||||
end
|
||||
|
||||
-- deprecate _OS
|
||||
_G_metatable = {
|
||||
__index = function(t, k)
|
||||
if (k == '_OS') then
|
||||
premake.warnOnce("_OS+get", "_OS is deprecated, use '_TARGET_OS'.")
|
||||
return rawget(t, "_TARGET_OS")
|
||||
else
|
||||
return rawget(t, k)
|
||||
end
|
||||
end,
|
||||
|
||||
__newindex = function(t, k, v)
|
||||
if (k == '_OS') then
|
||||
premake.warnOnce("_OS+set", "_OS is deprecated, use '_TARGET_OS'.")
|
||||
rawset(t, "_TARGET_OS", v)
|
||||
else
|
||||
rawset(t, k, v)
|
||||
end
|
||||
end
|
||||
}
|
||||
setmetatable(_G, _G_metatable)
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Check the current target operating system; may be set with the /os command line flag.
|
||||
--
|
||||
|
||||
function os.istarget(id)
|
||||
local tags = os.getSystemTags(os.target())
|
||||
return table.contains(tags, id:lower())
|
||||
end
|
||||
|
||||
function os.is(id)
|
||||
local caller = filelineinfo(2)
|
||||
premake.warnOnce(caller, "os.is() is deprecated, use 'os.istarget()' or 'os.ishost()'.\n @%s\n", caller)
|
||||
return os.istarget(id)
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Check the current host operating system.
|
||||
--
|
||||
|
||||
function os.ishost(id)
|
||||
local tags = os.getSystemTags(os.host())
|
||||
return table.contains(tags, id:lower())
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Determine if a directory exists on the file system, and that it is a
|
||||
-- directory and not a file.
|
||||
--
|
||||
-- @param p
|
||||
-- The path to check.
|
||||
-- @return
|
||||
-- True if a directory exists at the given path.
|
||||
---
|
||||
|
||||
premake.override(os, "isdir", function(base, p)
|
||||
p = path.normalize(p)
|
||||
return base(p)
|
||||
end)
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Determine if a file exists on the file system, and that it is a
|
||||
-- file and not a directory.
|
||||
--
|
||||
-- @param p
|
||||
-- The path to check.
|
||||
-- @return
|
||||
-- True if a file exists at the given path.
|
||||
---
|
||||
|
||||
premake.override(os, "isfile", function(base, p)
|
||||
p = path.normalize(p)
|
||||
return base(p)
|
||||
end)
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Determine if the current system is running a 64-bit architecture.
|
||||
--
|
||||
|
||||
local _is64bit
|
||||
|
||||
local _64BitHostTypes = {
|
||||
"x86_64",
|
||||
"ia64",
|
||||
"amd64",
|
||||
"ppc64",
|
||||
"powerpc64",
|
||||
"sparc64"
|
||||
}
|
||||
|
||||
function os.is64bit()
|
||||
-- This can be expensive to compute, so cache and reuse the response
|
||||
if _is64bit ~= nil then
|
||||
return _is64bit
|
||||
end
|
||||
|
||||
_is64bit = false
|
||||
|
||||
-- Call the native code implementation. If this returns true then
|
||||
-- we're 64-bit, otherwise do more checking locally
|
||||
if (os._is64bit()) then
|
||||
_is64bit = true
|
||||
else
|
||||
-- Identify the system
|
||||
local arch
|
||||
if os.ishost("windows") then
|
||||
arch = os.getenv("PROCESSOR_ARCHITECTURE")
|
||||
elseif os.ishost("macosx") then
|
||||
arch = os.outputof("echo $HOSTTYPE")
|
||||
else
|
||||
arch = os.outputof("uname -m")
|
||||
end
|
||||
|
||||
-- Check our known 64-bit identifiers
|
||||
arch = arch:lower()
|
||||
for _, hosttype in ipairs(_64BitHostTypes) do
|
||||
if arch:find(hosttype) then
|
||||
_is64bit = true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return _is64bit
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Perform a wildcard search for files and directories.
|
||||
--
|
||||
-- @param mask
|
||||
-- The file search pattern. Use "*" to match any part of a file or
|
||||
-- directory name, "**" to recurse into subdirectories.
|
||||
-- @return
|
||||
-- A table containing the matched file or directory names.
|
||||
---
|
||||
|
||||
function os.match(mask)
|
||||
mask = path.normalize(mask)
|
||||
local starpos = mask:find("%*")
|
||||
local before = path.getdirectory(starpos and mask:sub(1, starpos - 1) or mask)
|
||||
local slashpos = starpos and mask:find("/", starpos)
|
||||
local after = slashpos and mask:sub(slashpos + 1)
|
||||
|
||||
-- Only recurse for path components starting with '**':
|
||||
local recurse = starpos and
|
||||
mask:sub(starpos + 1, starpos + 1) == '*' and
|
||||
(starpos == 1 or mask:sub(starpos - 1, starpos - 1) == '/')
|
||||
|
||||
local results = { }
|
||||
|
||||
if recurse then
|
||||
local submask = mask:sub(1, starpos) .. mask:sub(starpos + 2)
|
||||
results = os.match(submask)
|
||||
|
||||
local pattern = mask:sub(1, starpos)
|
||||
local m = os.matchstart(pattern)
|
||||
while os.matchnext(m) do
|
||||
if not os.matchisfile(m) then
|
||||
local matchpath = path.join(before, os.matchname(m), mask:sub(starpos))
|
||||
results = table.join(results, os.match(matchpath))
|
||||
end
|
||||
end
|
||||
os.matchdone(m)
|
||||
else
|
||||
local pattern = mask:sub(1, slashpos and slashpos - 1)
|
||||
local m = os.matchstart(pattern)
|
||||
while os.matchnext(m) do
|
||||
if not (slashpos and os.matchisfile(m)) then
|
||||
local matchpath = path.join(before, matchpath, os.matchname(m))
|
||||
if after then
|
||||
results = table.join(results, os.match(path.join(matchpath, after)))
|
||||
else
|
||||
table.insert(results, matchpath)
|
||||
end
|
||||
end
|
||||
end
|
||||
os.matchdone(m)
|
||||
end
|
||||
|
||||
return results
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Perform a wildcard search for directories.
|
||||
--
|
||||
-- @param mask
|
||||
-- The search pattern. Use "*" to match any part of a directory
|
||||
-- name, "**" to recurse into subdirectories.
|
||||
-- @return
|
||||
-- A table containing the matched directory names.
|
||||
---
|
||||
|
||||
function os.matchdirs(mask)
|
||||
local results = os.match(mask)
|
||||
for i = #results, 1, -1 do
|
||||
if not os.isdir(results[i]) then
|
||||
table.remove(results, i)
|
||||
end
|
||||
end
|
||||
return results
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Perform a wildcard search for files.
|
||||
--
|
||||
-- @param mask
|
||||
-- The search pattern. Use "*" to match any part of a file
|
||||
-- name, "**" to recurse into subdirectories.
|
||||
-- @return
|
||||
-- A table containing the matched file names.
|
||||
---
|
||||
|
||||
function os.matchfiles(mask)
|
||||
local results = os.match(mask)
|
||||
for i = #results, 1, -1 do
|
||||
if not os.isfile(results[i]) then
|
||||
table.remove(results, i)
|
||||
end
|
||||
end
|
||||
return results
|
||||
end
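-- Usage sketch: "*" matches within a single directory level, while "**"
-- recurses into subdirectories; the patterns below are hypothetical.
local function _match_example()
	local headers = os.matchfiles("src/**.h")   -- every header underneath src/
	local subdirs = os.matchdirs("src/*")       -- immediate subdirectories of src/
	return headers, subdirs
end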
|
||||
|
||||
--
|
||||
-- An overload of the os.mkdir() function, which will create any missing
|
||||
-- subdirectories along the path.
|
||||
--
|
||||
|
||||
local builtin_mkdir = os.mkdir
|
||||
function os.mkdir(p)
|
||||
p = path.normalize(p)
|
||||
|
||||
local dir = iif(p:startswith("/"), "/", "")
|
||||
for part in p:gmatch("[^/]+") do
|
||||
dir = dir .. part
|
||||
|
||||
if (part ~= "" and not path.isabsolute(part) and not os.isdir(dir)) then
|
||||
local ok, err = builtin_mkdir(dir)
|
||||
if (not ok) then
|
||||
return nil, err
|
||||
end
|
||||
end
|
||||
|
||||
dir = dir .. "/"
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Run a shell command and return the output.
|
||||
--
|
||||
-- @param cmd Command to execute
|
||||
-- @param streams Standard stream(s) to output
|
||||
-- Must be one of
|
||||
-- - "both" (default)
|
||||
-- - "output" Return standard output stream content only
|
||||
-- - "error" Return standard error stream content only
|
||||
--
|
||||
|
||||
function os.outputof(cmd, streams)
|
||||
cmd = path.normalize(cmd)
|
||||
streams = streams or "both"
|
||||
local redirection
|
||||
if streams == "both" then
|
||||
redirection = " 2>&1"
|
||||
elseif streams == "output" then
|
||||
redirection = " 2>/dev/null"
|
||||
elseif streams == "error" then
|
||||
redirection = " 2>&1 1>/dev/null"
|
||||
else
|
||||
error ('Invalid stream(s) selection. "output", "error", or "both" expected.')
|
||||
end
|
||||
|
||||
local pipe = io.popen(cmd .. redirection)
|
||||
local result = pipe:read('*a')
|
||||
local success, what, code = pipe:close()
|
||||
if success then
|
||||
-- chomp trailing newlines
|
||||
if result then
|
||||
result = string.gsub(result, "[\r\n]+$", "")
|
||||
end
|
||||
|
||||
return result, code, what
|
||||
else
|
||||
return nil, code, what
|
||||
end
|
||||
end
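-- Usage sketch: capture just the standard output of a command; the command
-- shown is an example and is only meaningful where git is installed.
local function _outputof_example()
	local text, exitcode = os.outputof("git --version", "output")
	return text, exitcode
end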
|
||||
|
||||
|
||||
--
|
||||
-- @brief An overloaded os.remove() that will be able to handle list of files,
|
||||
-- as well as wildcards for files. Uses the same syntax as os.matchfiles() for
|
||||
-- matching pattern wildcards.
|
||||
--
|
||||
-- @param f A file, a wildcard, or a list of files or wildcards to be removed
|
||||
--
|
||||
-- @return true on success, false and an appropriate error message on error
|
||||
--
|
||||
-- @example ok, err = os.remove{"**.bak", "**.log"}
|
||||
-- if not ok then
|
||||
-- error(err)
|
||||
-- end
|
||||
--
|
||||
|
||||
local builtin_remove = os.remove
|
||||
function os.remove(f)
|
||||
-- in case of string, just match files
|
||||
if type(f) == "string" then
|
||||
local p = os.matchfiles(f)
|
||||
for _, v in pairs(p) do
|
||||
local ok, err, code = builtin_remove(v)
|
||||
if not ok then
|
||||
return ok, err, code
|
||||
end
|
||||
end
|
||||
if #p == 0 then
|
||||
return nil, "Couldn't find any file matching: " .. f, 1
|
||||
end
|
||||
-- in case of table, match files for every table entry
|
||||
elseif type(f) == "table" then
|
||||
for _, v in pairs(f) do
|
||||
local ok, err, code = os.remove(v)
|
||||
if not ok then
|
||||
return ok, err, code
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Remove a directory, along with any contained files or subdirectories.
|
||||
--
|
||||
-- @return true on success, false and an appropriate error message on error
|
||||
|
||||
local builtin_rmdir = os.rmdir
|
||||
function os.rmdir(p)
|
||||
-- recursively remove subdirectories
|
||||
local dirs = os.matchdirs(p .. "/*")
|
||||
for _, dname in ipairs(dirs) do
|
||||
local ok, err = os.rmdir(dname)
|
||||
if not ok then
|
||||
return ok, err
|
||||
end
|
||||
end
|
||||
|
||||
-- remove any files
|
||||
local files = os.matchfiles(p .. "/*")
|
||||
for _, fname in ipairs(files) do
|
||||
local ok, err = os.remove(fname)
|
||||
if not ok then
|
||||
return ok, err
|
||||
end
|
||||
end
|
||||
|
||||
-- remove this directory
|
||||
return builtin_rmdir(p)
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Return information about a file.
|
||||
---
|
||||
|
||||
premake.override(os, "stat", function(base, p)
|
||||
p = path.normalize(p)
|
||||
return base(p)
|
||||
end)
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Translate command tokens into their OS or action specific equivalents.
|
||||
---
|
||||
|
||||
os.commandTokens = {
|
||||
_ = {
|
||||
chdir = function(v)
|
||||
return "cd " .. path.normalize(v)
|
||||
end,
|
||||
copy = function(v)
|
||||
return "cp -rf " .. path.normalize(v)
|
||||
end,
|
||||
copyfile = function(v)
|
||||
return "cp -f " .. path.normalize(v)
|
||||
end,
|
||||
copydir = function(v)
|
||||
return "cp -rf " .. path.normalize(v)
|
||||
end,
|
||||
delete = function(v)
|
||||
return "rm -f " .. path.normalize(v)
|
||||
end,
|
||||
echo = function(v)
|
||||
return "echo " .. v
|
||||
end,
|
||||
mkdir = function(v)
|
||||
return "mkdir -p " .. path.normalize(v)
|
||||
end,
|
||||
move = function(v)
|
||||
return "mv -f " .. path.normalize(v)
|
||||
end,
|
||||
rmdir = function(v)
|
||||
return "rm -rf " .. path.normalize(v)
|
||||
end,
|
||||
touch = function(v)
|
||||
return "touch " .. path.normalize(v)
|
||||
end,
|
||||
},
|
||||
windows = {
|
||||
chdir = function(v)
|
||||
return "chdir " .. path.translate(path.normalize(v))
|
||||
end,
|
||||
copy = function(v)
|
||||
v = path.translate(path.normalize(v))
|
||||
|
||||
-- Detect if there are multiple parts to the input; if there are, grab the first part, else grab the whole thing
|
||||
local src = string.match(v, '^".-"') or string.match(v, '^.- ') or v
|
||||
|
||||
-- Strip the trailing space from the second condition so that we don't have a space between src and '\\NUL'
|
||||
src = string.match(src, '^.*%S')
|
||||
|
||||
return "IF EXIST " .. src .. "\\ (xcopy /Q /E /Y /I " .. v .. " > nul) ELSE (xcopy /Q /Y /I " .. v .. " > nul)"
|
||||
end,
|
||||
copyfile = function(v)
|
||||
v = path.translate(path.normalize(v))
|
||||
-- XCOPY doesn't have a switch to assume destination is a file when it doesn't exist.
|
||||
-- A trailing * will suppress the prompt but requires the file extensions be the same length.
|
||||
-- Just use COPY instead, it actually works.
|
||||
return "copy /B /Y " .. v
|
||||
end,
|
||||
copydir = function(v)
|
||||
v = path.translate(path.normalize(v))
|
||||
return "xcopy /Q /E /Y /I " .. v
|
||||
end,
|
||||
delete = function(v)
|
||||
return "del " .. path.translate(path.normalize(v))
|
||||
end,
|
||||
echo = function(v)
|
||||
return "echo " .. v
|
||||
end,
|
||||
mkdir = function(v)
|
||||
v = path.translate(path.normalize(v))
|
||||
return "IF NOT EXIST " .. v .. " (mkdir " .. v .. ")"
|
||||
end,
|
||||
move = function(v)
|
||||
return "move /Y " .. path.translate(path.normalize(v))
|
||||
end,
|
||||
rmdir = function(v)
|
||||
return "rmdir /S /Q " .. path.translate(path.normalize(v))
|
||||
end,
|
||||
touch = function(v)
|
||||
v = path.translate(path.normalize(v))
|
||||
return string.format("type nul >> %s && copy /b %s+,, %s", v, v, v)
|
||||
end,
|
||||
}
|
||||
}
|
||||
|
||||
function os.translateCommands(cmd, map)
|
||||
map = map or os.target()
|
||||
if type(map) == "string" then
|
||||
map = os.commandTokens[map] or os.commandTokens["_"]
|
||||
end
|
||||
|
||||
local processOne = function(cmd)
|
||||
local i, j, prev
|
||||
repeat
|
||||
i, j = cmd:find("{.-}")
|
||||
if i then
|
||||
if i == prev then
|
||||
break
|
||||
end
|
||||
|
||||
local token = cmd:sub(i + 1, j - 1):lower()
|
||||
local args = cmd:sub(j + 2)
|
||||
local func = map[token] or os.commandTokens["_"][token]
|
||||
if func then
|
||||
cmd = cmd:sub(1, i -1) .. func(args)
|
||||
end
|
||||
|
||||
prev = i
|
||||
end
|
||||
until i == nil
|
||||
return cmd
|
||||
end
|
||||
|
||||
if type(cmd) == "table" then
|
||||
local result = {}
|
||||
for i = 1, #cmd do
|
||||
result[i] = processOne(cmd[i])
|
||||
end
|
||||
return result
|
||||
else
|
||||
return processOne(cmd)
|
||||
end
|
||||
end
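-- Usage sketch: a token in braces is expanded through the maps above, so the
-- same command renders differently per target map.
local function _translateCommands_example()
	local posix = os.translateCommands("{MKDIR} obj/x64", "_")
	-- posix -> mkdir -p obj/x64
	local win = os.translateCommands("{MKDIR} obj/x64", "windows")
	-- win   -> IF NOT EXIST obj\x64 (mkdir obj\x64)
	return posix, win
end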
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Apply os slashes for decorated command paths.
|
||||
---
|
||||
function os.translateCommandAndPath(dir, map)
|
||||
if map == 'windows' then
|
||||
return path.translate(dir)
|
||||
end
|
||||
return dir
|
||||
end
|
||||
|
||||
---
|
||||
-- Translate decorated command paths into their OS equivalents.
|
||||
---
|
||||
function os.translateCommandsAndPaths(cmds, basedir, location, map)
|
||||
local translatedBaseDir = path.getrelative(location, basedir)
|
||||
|
||||
map = map or os.target()
|
||||
|
||||
local translateFunction = function(value)
|
||||
local result = path.join(translatedBaseDir, value)
|
||||
result = os.translateCommandAndPath(result, map)
|
||||
if value:endswith('/') or value:endswith('\\') or -- if original path ends with a slash then ensure the same
|
||||
value:endswith('/"') or value:endswith('\\"') then
|
||||
result = result .. '/'
|
||||
end
|
||||
return result
|
||||
end
|
||||
|
||||
local processOne = function(cmd)
|
||||
local replaceFunction = function(value)
|
||||
value = value:sub(3, #value - 1)
|
||||
return '"' .. translateFunction(value) .. '"'
|
||||
end
|
||||
return string.gsub(cmd, "%%%[[^%]\r\n]*%]", replaceFunction)
|
||||
end
|
||||
|
||||
if type(cmds) == "table" then
|
||||
local result = {}
|
||||
for i = 1, #cmds do
|
||||
result[i] = processOne(cmds[i])
|
||||
end
|
||||
return os.translateCommands(result, map)
|
||||
else
|
||||
return os.translateCommands(processOne(cmds), map)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Generate a UUID.
|
||||
--
|
||||
|
||||
os._uuids = {}
|
||||
|
||||
local builtin_uuid = os.uuid
|
||||
function os.uuid(name)
|
||||
local id = builtin_uuid(name)
|
||||
if name then
|
||||
if os._uuids[id] and os._uuids[id] ~= name then
|
||||
premake.warnOnce(id, "UUID clash between %s and %s", os._uuids[id], name)
|
||||
end
|
||||
os._uuids[id] = name
|
||||
end
|
||||
return id
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Get a set of tags for different 'platforms'
|
||||
--
|
||||
|
||||
os.systemTags =
|
||||
{
|
||||
["aix"] = { "aix", "posix" },
|
||||
["bsd"] = { "bsd", "posix" },
|
||||
["haiku"] = { "haiku", "posix" },
|
||||
["ios"] = { "ios", "darwin", "posix", "mobile" },
|
||||
["linux"] = { "linux", "posix" },
|
||||
["macosx"] = { "macosx", "darwin", "posix" },
|
||||
["solaris"] = { "solaris", "posix" },
|
||||
["windows"] = { "windows", "win32" },
|
||||
}
|
||||
|
||||
function os.getSystemTags(name)
|
||||
return os.systemTags[name:lower()] or { name:lower() }
|
||||
end
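-- Usage sketch: tags let a broad check such as "posix" match several
-- concrete systems; on a Linux host targeting Linux both calls return true.
local function _systemTags_example()
	return os.istarget("posix"), os.ishost("linux")
end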
|
792
Src/external_dependencies/openmpt-trunk/include/premake/src/base/oven.lua
vendored
Normal file
|
@@ -0,0 +1,792 @@
|
|||
--
|
||||
-- base/oven.lua
|
||||
--
|
||||
-- Process the workspaces, projects, and configurations that were specified
|
||||
-- by the project script, and make them suitable for use by the exporters
|
||||
-- and actions. Fills in computed values (e.g. object directories) and
|
||||
-- optimizes the layout of the data for faster fetches.
|
||||
--
|
||||
-- Copyright (c) 2002-2014 Jason Perkins and the Premake project
|
||||
--
|
||||
|
||||
local p = premake
|
||||
|
||||
p.oven = {}
|
||||
|
||||
local oven = p.oven
|
||||
local context = p.context
|
||||
|
||||
|
||||
--
|
||||
-- These fields get special treatment, "bubbling up" from the configurations
|
||||
-- to the project. This allows you to express, for example: "use this config
|
||||
-- map if this configuration is present in the project", and saves the step
|
||||
-- of clearing the current configuration filter before creating the map.
|
||||
--
|
||||
|
||||
p.oven.bubbledFields = {
|
||||
configmap = true,
|
||||
vpaths = true
|
||||
}
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Traverses the container hierarchy built up by the project scripts and
|
||||
-- filters, merges, and munges the information based on the current runtime
|
||||
-- environment in preparation for doing work on the results, like exporting
|
||||
-- project files.
|
||||
--
|
||||
-- This call replaces the existing container objects with their
|
||||
-- processed replacements. If you are using the provided container APIs
|
||||
-- (p.global.*, p.workspace.*, etc.) this will be transparent.
|
||||
---
|
||||
|
||||
function oven.bake()
|
||||
-- reset the root _isBaked state.
|
||||
-- this really only affects the unit-tests, since that is the only place
|
||||
-- where multiple bakes per 'exe run' happen.
|
||||
local root = p.api.rootContainer()
|
||||
root._isBaked = false;
|
||||
|
||||
p.container.bake(root)
|
||||
end
|
||||
|
||||
function oven.bakeWorkspace(wks)
|
||||
return p.container.bake(wks)
|
||||
end
|
||||
|
||||
p.alias(oven, "bakeWorkspace", "bakeSolution")
|
||||
|
||||
|
||||
local function addCommonContextFilters(self)
|
||||
context.addFilter(self, "_ACTION", _ACTION)
|
||||
context.addFilter(self, "action", _ACTION)
|
||||
|
||||
self.system = self.system or os.target()
|
||||
context.addFilter(self, "system", os.getSystemTags(self.system))
|
||||
context.addFilter(self, "host", os.getSystemTags(os.host()))
|
||||
|
||||
-- Add command line options to the filtering options
|
||||
local options = {}
|
||||
for key, value in pairs(_OPTIONS) do
|
||||
local term = key
|
||||
if value ~= "" then
|
||||
term = term .. "=" .. tostring(value)
|
||||
end
|
||||
table.insert(options, term)
|
||||
end
|
||||
context.addFilter(self, "_OPTIONS", options)
|
||||
context.addFilter(self, "options", options)
|
||||
end
|
||||
|
||||
---
|
||||
-- Bakes a specific workspace object.
|
||||
---
|
||||
|
||||
function p.workspace.bake(self)
|
||||
-- Add filtering terms to the context and then compile the results. These
|
||||
-- terms describe the "operating environment"; only results contained by
|
||||
-- configuration blocks which match these terms will be returned.
|
||||
|
||||
addCommonContextFilters(self)
|
||||
|
||||
-- Set up my token expansion environment
|
||||
|
||||
self.environ = {
|
||||
wks = self,
|
||||
sln = self,
|
||||
}
|
||||
|
||||
context.compile(self)
|
||||
|
||||
-- Specify the workspace's file system location; when path tokens are
|
||||
-- expanded in workspace values, they will be made relative to this.
|
||||
|
||||
self.location = self.location or self.basedir
|
||||
context.basedir(self, self.location)
|
||||
|
||||
-- Build a master list of configuration/platform pairs from all of the
|
||||
-- projects contained by the workspace; I will need this when generating
|
||||
-- workspace files in order to provide a map from workspace configurations
|
||||
-- to project configurations.
|
||||
|
||||
self.configs = oven.bakeConfigs(self)
|
||||
|
||||
-- Now bake down all of the projects contained in the workspace, and
|
||||
-- store that for future reference
|
||||
|
||||
p.container.bakeChildren(self)
|
||||
|
||||
-- I now have enough information to assign unique object directories
|
||||
-- to each project configuration in the workspace.
|
||||
|
||||
oven.bakeObjDirs(self)
|
||||
|
||||
-- now we can post process the projects for 'buildoutputs' files
|
||||
-- that have the 'compilebuildoutputs' flag
|
||||
oven.addGeneratedFiles(self)
|
||||
end
|
||||
|
||||
|
||||
function oven.addGeneratedFiles(wks)
|
||||
|
||||
local function addGeneratedFile(cfg, source, filename)
|
||||
-- mark that we have generated files.
|
||||
cfg.project.hasGeneratedFiles = true
|
||||
|
||||
-- add generated file to the project.
|
||||
local files = cfg.project._.files
|
||||
local node = files[filename]
|
||||
if not node then
|
||||
node = p.fileconfig.new(filename, cfg.project)
|
||||
files[filename] = node
|
||||
table.insert(files, node)
|
||||
end
|
||||
|
||||
-- always overwrite the dependency information.
|
||||
node.dependsOn = source
|
||||
node.generated = true
|
||||
|
||||
-- add to config if not already added.
|
||||
if not p.fileconfig.getconfig(node, cfg) then
|
||||
p.fileconfig.addconfig(node, cfg)
|
||||
end
|
||||
end
|
||||
|
||||
local function addFile(cfg, node)
|
||||
local filecfg = p.fileconfig.getconfig(node, cfg)
|
||||
if not filecfg or filecfg.flags.ExcludeFromBuild or not filecfg.compilebuildoutputs then
|
||||
return
|
||||
end
|
||||
|
||||
if p.fileconfig.hasCustomBuildRule(filecfg) then
|
||||
local buildoutputs = filecfg.buildoutputs
|
||||
if buildoutputs and #buildoutputs > 0 then
|
||||
for _, output in ipairs(buildoutputs) do
|
||||
if not path.islinkable(output) then
|
||||
addGeneratedFile(cfg, node, output)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
for prj in p.workspace.eachproject(wks) do
|
||||
local files = table.shallowcopy(prj._.files)
|
||||
for cfg in p.project.eachconfig(prj) do
|
||||
table.foreachi(files, function(node)
|
||||
addFile(cfg, node)
|
||||
end)
|
||||
end
|
||||
|
||||
-- generated files might screw up the object sequences.
|
||||
if prj.hasGeneratedFiles and p.project.isnative(prj) then
|
||||
oven.assignObjectSequences(prj)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
function p.project.bake(self)
|
||||
verbosef(' Baking %s...', self.name)
|
||||
|
||||
self.solution = self.workspace
|
||||
self.global = self.workspace.global
|
||||
|
||||
local wks = self.workspace
|
||||
|
||||
-- Add filtering terms to the context to make it as specific as I can.
|
||||
-- Start with the same filtering that was applied at the workspace level.
|
||||
|
||||
context.copyFilters(self, wks)
|
||||
|
||||
-- Now filter on the current system and architecture, allowing the
|
||||
-- values that might already be in the context to override my defaults.
|
||||
|
||||
self.system = self.system or os.target()
|
||||
context.addFilter(self, "system", os.getSystemTags(self.system))
|
||||
context.addFilter(self, "host", os.getSystemTags(os.host()))
|
||||
context.addFilter(self, "architecture", self.architecture)
|
||||
context.addFilter(self, "tags", self.tags)
|
||||
|
||||
-- The kind is a configuration level value, but if it has been set at the
|
||||
-- project level allow that to influence the other project-level results.
|
||||
|
||||
context.addFilter(self, "kind", self.kind)
|
||||
|
||||
-- Allow the project object to also be treated like a configuration
|
||||
|
||||
self.project = self
|
||||
|
||||
-- Populate the token expansion environment
|
||||
|
||||
self.environ = {
|
||||
wks = wks,
|
||||
sln = wks,
|
||||
prj = self,
|
||||
}
|
||||
|
||||
-- Go ahead and distill all of that down now; this is my new project object
|
||||
|
||||
context.compile(self)
|
||||
|
||||
p.container.bakeChildren(self)
|
||||
|
||||
-- Set the context's base directory to the project's file system
|
||||
-- location. Any path tokens which are expanded in non-path fields
|
||||
-- are made relative to this, ensuring a portable generated project.
|
||||
|
||||
self.location = self.location or self.basedir
|
||||
context.basedir(self, self.location)
|
||||
|
||||
-- This bit could use some work: create a canonical set of configurations
|
||||
-- for the project, along with a mapping from the workspace's configurations.
|
||||
-- This works, but it could probably be simplified.
|
||||
|
||||
local cfgs = table.fold(self.configurations or {}, self.platforms or {})
|
||||
oven.bubbleFields(self, self, cfgs)
|
||||
self._cfglist = oven.bakeConfigList(self, cfgs)
|
||||
|
||||
-- Don't allow a project-level system setting to influence the configurations
|
||||
|
||||
local projectSystem = self.system
|
||||
self.system = nil
|
||||
|
||||
-- Finally, step through the list of configurations I built above and
|
||||
-- bake all of those down into configuration contexts as well. Store
|
||||
-- the results with the project.
|
||||
|
||||
self.configs = {}
|
||||
|
||||
for _, pairing in ipairs(self._cfglist) do
|
||||
local buildcfg = pairing[1]
|
||||
local platform = pairing[2]
|
||||
local cfg = oven.bakeConfig(wks, self, buildcfg, platform)
|
||||
|
||||
if p.action.supportsconfig(p.action.current(), cfg) then
|
||||
self.configs[(buildcfg or "*") .. (platform or "")] = cfg
|
||||
end
|
||||
end
|
||||
|
||||
-- Process the sub-objects that are contained by this project. The
|
||||
-- configuration build stuff above really belongs in here now.
|
||||
|
||||
self._ = {}
|
||||
self._.files = oven.bakeFiles(self)
|
||||
|
||||
-- If this type of project generates object files, look for files that will
|
||||
-- generate object name collisions (i.e. src/hello.cpp and tests/hello.cpp
|
||||
-- both create hello.o) and assign unique sequence numbers to each. I need
|
||||
-- to do this up front to make sure the sequence numbers are the same for
|
||||
-- all the tools, even if they reorder the source file list.
|
||||
|
||||
if p.project.isnative(self) then
|
||||
oven.assignObjectSequences(self)
|
||||
end
|
||||
|
||||
-- at the end, restore the system, so it's usable elsewhere.
|
||||
self.system = projectSystem
|
||||
end
|
||||
|
||||
|
||||
function p.rule.bake(self)
|
||||
-- Add filtering terms to the context and then compile the results. These
|
||||
-- terms describe the "operating environment"; only results contained by
|
||||
-- configuration blocks which match these terms will be returned.
|
||||
|
||||
addCommonContextFilters(self)
|
||||
|
||||
-- Populate the token expansion environment
|
||||
|
||||
self.environ = {
|
||||
rule = self,
|
||||
}
|
||||
|
||||
-- Go ahead and distill all of that down now; this is my new rule object
|
||||
|
||||
context.compile(self)
|
||||
|
||||
-- sort the propertydefinition table.
|
||||
table.sort(self.propertydefinition, function (a, b)
|
||||
return a.name < b.name
|
||||
end)
|
||||
|
||||
-- Set the context's base directory to the rule's file system
|
||||
-- location. Any path tokens which are expanded in non-path fields
|
||||
-- are made relative to this, ensuring a portable generated rule.
|
||||
|
||||
self.location = self.location or self.basedir
|
||||
context.basedir(self, self.location)
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Assigns a unique objects directory to every configuration of every project
|
||||
-- in the workspace, taking any objdir settings into account, to ensure builds
|
||||
-- from different configurations won't step on each others' object files.
|
||||
-- The path is built from these choices, in order:
|
||||
--
|
||||
-- [1] -> the objects directory as set in the config
|
||||
-- [2] -> [1] + the platform name
|
||||
-- [3] -> [2] + the build configuration name
|
||||
-- [4] -> [3] + the project name
|
||||
--
|
||||
-- @param wks
|
||||
-- The workspace to process. The directories are modified inline.
|
||||
--
|
||||
|
||||
function oven.bakeObjDirs(wks)
|
||||
-- function to compute the four options for a specific configuration
|
||||
local function getobjdirs(cfg)
|
||||
-- the "!" prefix indicates the directory is not to be touched
|
||||
local objdir = cfg.objdir or "obj"
|
||||
local i = objdir:find("!", 1, true)
|
||||
if i then
|
||||
cfg.objdir = objdir:sub(1, i - 1) .. objdir:sub(i + 1)
|
||||
return nil
|
||||
end
|
||||
|
||||
local dirs = {}
|
||||
|
||||
local dir = path.getabsolute(path.join(cfg.project.location, objdir))
|
||||
table.insert(dirs, dir)
|
||||
|
||||
if cfg.platform then
|
||||
dir = path.join(dir, cfg.platform)
|
||||
table.insert(dirs, dir)
|
||||
end
|
||||
|
||||
dir = path.join(dir, cfg.buildcfg)
|
||||
table.insert(dirs, dir)
|
||||
|
||||
dir = path.join(dir, cfg.project.name)
|
||||
table.insert(dirs, dir)
|
||||
|
||||
return dirs
|
||||
end
|
||||
|
||||
-- walk all of the configs in the workspace, and count the number of
|
||||
-- times each obj dir gets used
|
||||
local counts = {}
|
||||
local configs = {}
|
||||
|
||||
for prj in p.workspace.eachproject(wks) do
|
||||
for cfg in p.project.eachconfig(prj) do
|
||||
-- get the dirs for this config, and associate them together,
|
||||
-- and increment a counter for each one discovered
|
||||
local dirs = getobjdirs(cfg)
|
||||
if dirs then
|
||||
configs[cfg] = dirs
|
||||
for _, dir in ipairs(dirs or {}) do
|
||||
counts[dir] = (counts[dir] or 0) + 1
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- now walk the list again, and assign the first unique value
|
||||
for cfg, dirs in pairs(configs) do
|
||||
for _, dir in ipairs(dirs) do
|
||||
if counts[dir] == 1 then
|
||||
cfg.objdir = dir
|
||||
break
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Create a list of workspace-level build configuration/platform pairs.
|
||||
--
|
||||
|
||||
function oven.bakeConfigs(wks)
|
||||
local buildcfgs = wks.configurations or {}
|
||||
local platforms = wks.platforms or {}
|
||||
|
||||
local configs = {}
|
||||
|
||||
local pairings = table.fold(buildcfgs, platforms)
|
||||
for _, pairing in ipairs(pairings) do
|
||||
local cfg = oven.bakeConfig(wks, nil, pairing[1], pairing[2])
|
||||
if p.action.supportsconfig(p.action.current(), cfg) then
|
||||
table.insert(configs, cfg)
|
||||
end
|
||||
end
|
||||
|
||||
return configs
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- It can be useful to state "use this map if this configuration is present".
|
||||
-- To allow this to happen, config maps that are specified within a project
|
||||
-- configuration are allowed to "bubble up" to the top level. Currently,
|
||||
-- maps are the only values that get this special behavior.
|
||||
--
|
||||
-- @param ctx
|
||||
-- The project context information.
|
||||
-- @param cset
|
||||
-- The project's original configuration set, which contains the settings
|
||||
-- of all the project configurations.
|
||||
-- @param cfgs
|
||||
-- The list of the project's build cfg/platform pairs.
|
||||
--
|
||||
|
||||
function oven.bubbleFields(ctx, cset, cfgs)
|
||||
-- build a query filter that will match any configuration name,
|
||||
-- within the existing constraints of the project
|
||||
|
||||
local configurations = {}
|
||||
local platforms = {}
|
||||
|
||||
for _, cfg in ipairs(cfgs) do
|
||||
if cfg[1] then
|
||||
table.insert(configurations, cfg[1]:lower())
|
||||
end
|
||||
if cfg[2] then
|
||||
table.insert(platforms, cfg[2]:lower())
|
||||
end
|
||||
end
|
||||
|
||||
local terms = table.deepcopy(ctx.terms)
|
||||
terms.configurations = configurations
|
||||
terms.platforms = platforms
|
||||
|
||||
for key in pairs(oven.bubbledFields) do
|
||||
local field = p.field.get(key)
|
||||
if not field then
|
||||
ctx[key] = rawget(ctx, key)
|
||||
else
|
||||
local value = p.configset.fetch(cset, field, terms, ctx)
|
||||
if value then
|
||||
ctx[key] = value
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Builds a list of build configuration/platform pairs for a project,
|
||||
-- along with a mapping between the workspace and project configurations.
|
||||
--
|
||||
-- @param ctx
|
||||
-- The project context information.
|
||||
-- @param cfgs
|
||||
-- The list of the project's build cfg/platform pairs.
|
||||
-- @return
|
||||
-- An array of the project's build configuration/platform pairs,
|
||||
-- based on any discovered mappings.
|
||||
--
|
||||
|
||||
function oven.bakeConfigList(ctx, cfgs)
|
||||
-- run them all through the project's config map
|
||||
for i, cfg in ipairs(cfgs) do
|
||||
cfgs[i] = p.project.mapconfig(ctx, cfg[1], cfg[2])
|
||||
end
|
||||
|
||||
-- walk through the result and remove any duplicates
|
||||
local buildcfgs = {}
|
||||
local platforms = {}
|
||||
|
||||
for _, pairing in ipairs(cfgs) do
|
||||
local buildcfg = pairing[1]
|
||||
local platform = pairing[2]
|
||||
|
||||
if not table.contains(buildcfgs, buildcfg) then
|
||||
table.insert(buildcfgs, buildcfg)
|
||||
end
|
||||
|
||||
if platform and not table.contains(platforms, platform) then
|
||||
table.insert(platforms, platform)
|
||||
end
|
||||
end
|
||||
|
||||
-- merge these de-duped lists back into pairs for the final result
|
||||
return table.fold(buildcfgs, platforms)
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Flattens out the build settings for a particular build configuration and
|
||||
-- platform pairing, and returns the result.
|
||||
--
|
||||
-- @param wks
|
||||
-- The workspace which contains the configuration data.
|
||||
-- @param prj
|
||||
-- The project which contains the configuration data. Can be nil.
|
||||
-- @param buildcfg
|
||||
-- The target build configuration, a value from configurations().
|
||||
-- @param platform
|
||||
-- The target platform, a value from platforms().
|
||||
-- @param extraFilters
|
||||
-- Optional. Any extra filter terms to use when retrieving the data for
|
||||
-- this configuration
|
||||
---
|
||||
|
||||
function oven.bakeConfig(wks, prj, buildcfg, platform, extraFilters)
|
||||
|
||||
-- Set the default system and architecture values; if the platform's
|
||||
-- name matches a known system or architecture, use that as the default.
|
||||
-- More than a convenience; this is required to work properly with
|
||||
-- external Visual Studio project files.
|
||||
|
||||
local system = os.target()
|
||||
local architecture = nil
|
||||
local toolset = p.action.current().toolset
|
||||
|
||||
if platform then
|
||||
system = p.api.checkValue(p.fields.system, platform) or system
|
||||
architecture = p.api.checkValue(p.fields.architecture, platform) or architecture
|
||||
toolset = p.api.checkValue(p.fields.toolset, platform) or toolset
|
||||
end
|
||||
|
||||
-- Wrap the projects's configuration set (which contains all of the information
|
||||
-- provided by the project script) with a context object. The context handles
|
||||
-- the expansion of tokens, and caching of retrieved values. The environment
|
||||
-- values are used when expanding tokens.
|
||||
|
||||
local environ = {
|
||||
wks = wks,
|
||||
sln = wks,
|
||||
prj = prj,
|
||||
}
|
||||
|
||||
local ctx = context.new(prj or wks, environ)
|
||||
|
||||
ctx.project = prj
|
||||
ctx.workspace = wks
|
||||
ctx.solution = wks
|
||||
ctx.global = wks.global
|
||||
ctx.buildcfg = buildcfg
|
||||
ctx.platform = platform
|
||||
ctx.action = _ACTION
|
||||
|
||||
-- Allow the configuration information to be accessed by tokens contained
|
||||
-- within the configuration itself
|
||||
|
||||
environ.cfg = ctx
|
||||
|
||||
-- Add filtering terms to the context and then compile the results. These
|
||||
-- terms describe the "operating environment"; only results contained by
|
||||
-- configuration blocks which match these terms will be returned. Start
|
||||
-- by copying over the top-level environment from the workspace. Don't
|
||||
-- copy the project terms though, so configurations can override those.
|
||||
|
||||
context.copyFilters(ctx, wks)
|
||||
|
||||
context.addFilter(ctx, "configurations", buildcfg)
|
||||
context.addFilter(ctx, "platforms", platform)
|
||||
if prj then
|
||||
context.addFilter(ctx, "language", prj.language)
|
||||
end
|
||||
|
||||
-- allow the project script to override the default system
|
||||
ctx.system = ctx.system or system
|
||||
context.addFilter(ctx, "system", os.getSystemTags(ctx.system))
|
||||
context.addFilter(ctx, "host", os.getSystemTags(os.host()))
|
||||
|
||||
-- allow the project script to override the default architecture
|
||||
ctx.architecture = ctx.architecture or architecture
|
||||
context.addFilter(ctx, "architecture", ctx.architecture)
|
||||
|
||||
-- allow the project script to override the default toolset
|
||||
ctx.toolset = _OPTIONS.cc or ctx.toolset or toolset
|
||||
context.addFilter(ctx, "toolset", ctx.toolset)
|
||||
|
||||
-- if a kind is set, allow that to influence the configuration
|
||||
context.addFilter(ctx, "kind", ctx.kind)
|
||||
|
||||
-- if a sharedlibtype is set, allow that to influence the configuration
|
||||
context.addFilter(ctx, "sharedlibtype", ctx.sharedlibtype)
|
||||
|
||||
-- if tags are set, allow that to influence the configuration
|
||||
context.addFilter(ctx, "tags", ctx.tags)
|
||||
|
||||
-- if any extra filters were specified, can include them now
|
||||
if extraFilters then
|
||||
for k, v in pairs(extraFilters) do
|
||||
context.addFilter(ctx, k, v)
|
||||
end
|
||||
end
|
||||
|
||||
context.compile(ctx)
|
||||
|
||||
ctx.location = ctx.location or prj and prj.location
|
||||
context.basedir(ctx, ctx.location)
|
||||
|
||||
-- Fill in a few calculated for the configuration, including the long
|
||||
-- and short names and the build and link target.
|
||||
|
||||
oven.finishConfig(ctx)
|
||||
return ctx
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Create configuration objects for each file contained in the project. This
|
||||
-- collects and collates all of the values specified in the project scripts,
|
||||
-- and computes extra values like the relative path and object names.
|
||||
--
|
||||
-- @param prj
|
||||
-- The project object being baked. The project
|
||||
-- @return
|
||||
-- A collection of file configurations, keyed by both the absolute file
|
||||
-- path and an alpha-sorted index.
|
||||
--
|
||||
|
||||
function oven.bakeFiles(prj)
|
||||
|
||||
local files = {}
|
||||
|
||||
-- Start by building a comprehensive list of all the files contained by the
|
||||
-- project. Some files may only be included in a subset of configurations so
|
||||
-- I need to look at them all.
|
||||
|
||||
for cfg in p.project.eachconfig(prj) do
|
||||
local function addFile(fname, i)
|
||||
|
||||
-- If this is the first time I've seen this file, start a new
|
||||
-- file configuration for it. Track both by key for quick lookups
|
||||
-- and indexed for ordered iteration.
|
||||
local fcfg = files[fname]
|
||||
if not fcfg then
|
||||
fcfg = p.fileconfig.new(fname, prj)
|
||||
fcfg.order = i
|
||||
files[fname] = fcfg
|
||||
table.insert(files, fcfg)
|
||||
end
|
||||
|
||||
p.fileconfig.addconfig(fcfg, cfg)
|
||||
end
|
||||
|
||||
table.foreachi(cfg.files, addFile)
|
||||
|
||||
-- If this project uses NuGet, we need to add the generated
|
||||
-- packages.config file to the project. Is there a better place to
|
||||
-- do this?
|
||||
|
||||
if #prj.nuget > 0 and (_ACTION < "vs2017" or p.project.iscpp(prj)) then
|
||||
addFile("packages.config")
|
||||
end
|
||||
end
|
||||
|
||||
-- Alpha sort the indices, so I will get consistent results in
|
||||
-- the exported project files.
|
||||
|
||||
table.sort(files, function(a,b)
|
||||
return a.vpath < b.vpath
|
||||
end)
|
||||
|
||||
return files
|
||||
end
|
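
-- [Editor's note] Illustrative sketch only, not part of the original source.
-- The `files` collection above is deliberately dual-indexed: hashed by the
-- file name for fast lookups and array-indexed for ordered iteration; kept in
-- a block comment so the module's behavior is unchanged.
--[[
local files = {}
local fcfg = { vpath = "src/hello.c" }
files["src/hello.c"] = fcfg   -- keyed entry, used for quick "have I seen this?" checks
table.insert(files, fcfg)     -- ordered entry, used by table.sort and ipairs
--..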


--
-- Assign unique sequence numbers to any source code files that would generate
-- conflicting object file names (i.e. src/hello.cpp and tests/hello.cpp both
-- create hello.o).
--
-- A file list of src/hello.cpp, tests/hello.cpp and src/hello1.cpp also generates
-- conflicting object file names - hello1.o

function oven.uniqueSequence(f, cfg, seq, bases)
	while true do
		f.sequence = seq[cfg] or 0
		seq[cfg] = f.sequence + 1

		if f.sequence == 0 then
			-- first time seeing this objname
			break
		end

		-- getting here has changed our sequence number, but this new "basename"
		-- may still collide with files that actually end with this "sequence number"
		-- so we have to check the bases table now

		-- objname changes with the sequence number on every loop
		local lowerobj = f.objname:lower()
		if not bases[lowerobj] then
			-- this is the first appearance of a file that produces this objname
			-- initialize the table for any future basename that matches our objname
			bases[lowerobj] = {}
		end

		if not bases[lowerobj][cfg] then
			-- not a collision
			-- start a sequence for a future basename that matches our objname for this cfg
			bases[lowerobj][cfg] = 1
			break
		end
		-- else we have an objname collision, try the next sequence number
	end
end
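
-- [Editor's note] Illustrative sketch only, not part of the original source;
-- the first two statements of uniqueSequence() behave like this for two files
-- that share the basename "hello" in one configuration (kept in a block
-- comment so the module's behavior is unchanged):
--[[
local seq = {}               -- sequence counters for the "hello" object name
local cfg = "Debug"
local s1 = seq[cfg] or 0     -- first  hello.cpp: sequence 0 -> "hello.o"
seq[cfg] = s1 + 1
local s2 = seq[cfg] or 0     -- second hello.cpp: sequence 1 -> "hello1.o"
seq[cfg] = s2 + 1
-- If a real src/hello1.cpp also exists, the bases table above notices that
-- "hello1" is already taken and the loop bumps the sequence once more.
--..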


function oven.assignObjectSequences(prj)

	-- Iterate over the file configurations which were prepared and cached in
	-- project.bakeFiles(); find buildable files with common base file names.

	local bases = {}
	table.foreachi(prj._.files, function(file)

		-- Only consider sources that actually generate object files

		if not path.isnativefile(file.abspath) then
			return
		end

		-- For each base file name encountered, keep a count of the number of
		-- collisions that have occurred for each project configuration. Use
		-- this collision count to generate the unique object file names.

		local lowerbase = file.basename:lower()
		if not bases[lowerbase] then
			bases[lowerbase] = {}
		end

		local sequences = bases[lowerbase]

		for cfg in p.project.eachconfig(prj) do
			local fcfg = p.fileconfig.getconfig(file, cfg)
			if fcfg ~= nil and not fcfg.flags.ExcludeFromBuild then
				oven.uniqueSequence(fcfg, cfg, sequences, bases)
			end
		end

		-- Makefiles don't use per-configuration object names yet; keep
		-- this around until they do. At which point I might consider just
		-- storing the sequence number instead of the whole object name.

		oven.uniqueSequence(file, prj, sequences, bases)

	end)
end



--
-- Finish the baking process for workspace- or project-level configurations.
-- Doesn't bake per se, just fills in some calculated values.
--

function oven.finishConfig(cfg)
	-- assign human-readable names
	cfg.longname = table.concat({ cfg.buildcfg, cfg.platform }, "|")
	cfg.shortname = table.concat({ cfg.buildcfg, cfg.platform }, " ")
	cfg.shortname = cfg.shortname:gsub(" ", "_"):lower()
	cfg.name = cfg.longname

	-- compute build and link targets
	if cfg.project and cfg.kind then
		cfg.buildtarget = p.config.gettargetinfo(cfg)
		cfg.buildtarget.relpath = p.project.getrelative(cfg.project, cfg.buildtarget.abspath)

		cfg.linktarget = p.config.getlinkinfo(cfg)
		cfg.linktarget.relpath = p.project.getrelative(cfg.project, cfg.linktarget.abspath)
	end
end
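
-- [Editor's note] Illustrative sketch only, not part of the original source;
-- plain Lua showing how the human-readable names above come out for a
-- "Debug" / "x86_64" pairing (kept in a block comment so the module's
-- behavior is unchanged).
--[[
local buildcfg, platform = "Debug", "x86_64"
local longname = table.concat({ buildcfg, platform }, "|")    -- "Debug|x86_64"
local shortname = table.concat({ buildcfg, platform }, " ")
shortname = shortname:gsub(" ", "_"):lower()                  -- "debug_x86_64"
--..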
304
Src/external_dependencies/openmpt-trunk/include/premake/src/base/path.lua
vendored
Normal file
@@ -0,0 +1,304 @@
--
|
||||
-- path.lua
|
||||
-- Path manipulation functions.
|
||||
-- Copyright (c) 2002-2014 Jason Perkins and the Premake project
|
||||
--
|
||||
|
||||
|
||||
--
|
||||
-- Appends a file extension to the path. Verifies that the extension
|
||||
-- isn't already present, and adjusts quotes as necessary.
|
||||
--
|
||||
|
||||
function path.appendExtension(p, ext)
|
||||
-- if the extension is nil or empty, do nothing
|
||||
if not ext or ext == "" then
|
||||
return p
|
||||
end
|
||||
|
||||
-- if the path ends with a quote, pull it off
|
||||
local endquote
|
||||
if p:endswith('"') then
|
||||
p = p:sub(1, -2)
|
||||
endquote = '"'
|
||||
end
|
||||
|
||||
-- add the extension if it isn't there already
|
||||
if not path.hasextension(p, ext) then
|
||||
p = p .. ext
|
||||
end
|
||||
|
||||
-- put the quote back if necessary
|
||||
if endquote then
|
||||
p = p .. endquote
|
||||
end
|
||||
|
||||
return p
|
||||
end
|
||||
|
||||
path.appendextension = path.appendExtension
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Retrieve the filename portion of a path, without any extension.
|
||||
--
|
||||
|
||||
function path.getbasename(p)
|
||||
local name = path.getname(p)
|
||||
local i = name:findlast(".", true)
|
||||
if (i) then
|
||||
return name:sub(1, i - 1)
|
||||
else
|
||||
return name
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Retrieve the directory portion of a path, or an empty string if
|
||||
-- the path does not include a directory.
|
||||
--
|
||||
|
||||
function path.getdirectory(p)
|
||||
local i = p:findlast("/", true)
|
||||
if (i) then
|
||||
if i > 1 then i = i - 1 end
|
||||
return p:sub(1, i)
|
||||
else
|
||||
return "."
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Retrieve the drive letter, if a Windows path.
|
||||
--
|
||||
|
||||
function path.getdrive(p)
|
||||
local ch1 = p:sub(1,1)
|
||||
local ch2 = p:sub(2,2)
|
||||
if ch2 == ":" then
|
||||
return ch1
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Retrieve the file extension.
|
||||
--
|
||||
|
||||
function path.getextension(p)
|
||||
p = path.getname(p)
|
||||
local i = p:findlast(".", true)
|
||||
if (i) then
|
||||
return p:sub(i)
|
||||
else
|
||||
return ""
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Remove extension from path.
|
||||
--
|
||||
|
||||
function path.removeextension(p)
|
||||
local i = p:findlast(".", true)
|
||||
if (i) then
|
||||
if i > 1 then i = i - 1 end
|
||||
return p:sub(1, i)
|
||||
else
|
||||
return ""
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Retrieve the filename portion of a path.
|
||||
--
|
||||
|
||||
function path.getname(p)
|
||||
local i = p:findlast("[/\\]")
|
||||
if (i) then
|
||||
return p:sub(i + 1)
|
||||
else
|
||||
return p
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Returns true if the filename has a particular extension.
|
||||
--
|
||||
-- @param fname
|
||||
-- The file name to test.
|
||||
-- @param extensions
|
||||
-- The extension(s) to test. Maybe be a string or table.
|
||||
--
|
||||
|
||||
function path.hasextension(fname, extensions)
|
||||
local fext = path.getextension(fname):lower()
|
||||
if type(extensions) == "table" then
|
||||
for _, extension in pairs(extensions) do
|
||||
if fext == extension then
|
||||
return true
|
||||
end
|
||||
end
|
||||
return false
|
||||
else
|
||||
return (fext == extensions)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Returns true if the filename represents various source languages.
|
||||
--
|
||||
|
||||
function path.isasmfile(fname)
|
||||
return path.hasextension(fname, { ".s" })
|
||||
end
|
||||
|
||||
function path.iscfile(fname)
|
||||
return path.hasextension(fname, { ".c" })
|
||||
or path.isasmfile(fname) -- is this really right?
|
||||
or path.isobjcfile(fname) -- there is code that depends on this behaviour, which would need to change
|
||||
end
|
||||
|
||||
function path.iscppfile(fname)
|
||||
return path.hasextension(fname, { ".cc", ".cpp", ".cxx", ".c++" })
|
||||
or path.isobjcppfile(fname) -- is this really right?
|
||||
or path.iscfile(fname)
|
||||
end
|
||||
|
||||
function path.isobjcfile(fname)
|
||||
return path.hasextension(fname, { ".m" })
|
||||
end
|
||||
|
||||
function path.isobjcppfile(fname)
|
||||
return path.hasextension(fname, { ".mm" })
|
||||
end
|
||||
|
||||
function path.iscppheader(fname)
|
||||
return path.hasextension(fname, { ".h", ".hh", ".hpp", ".hxx" })
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Returns true if the filename represents a native language source file.
|
||||
-- These checks are used to prevent passing non-code files to the compiler
|
||||
-- in makefiles. It is not foolproof, but it has held up well. I'm open to
|
||||
-- better suggestions.
|
||||
--
|
||||
|
||||
function path.isnativefile(fname)
|
||||
return path.iscfile(fname)
|
||||
or path.iscppfile(fname)
|
||||
or path.isasmfile(fname)
|
||||
or path.isobjcfile(fname)
|
||||
or path.isobjcppfile(fname)
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Returns true if the filename represents an OS X framework.
|
||||
--
|
||||
|
||||
function path.isframework(fname)
|
||||
return path.hasextension(fname, ".framework")
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Is this a type of file that can be linked?
|
||||
---
|
||||
|
||||
function path.islinkable(fname)
|
||||
return path.hasextension(fname, { ".o", ".obj", ".a", ".lib", ".so" })
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Returns true if the filename represents an object file.
|
||||
--
|
||||
|
||||
function path.isobjectfile(fname)
|
||||
return path.hasextension(fname, { ".o", ".obj" })
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Returns true if the filename represents a Windows resource file. This check
|
||||
-- is used to prevent passing non-resources to the compiler in makefiles.
|
||||
--
|
||||
|
||||
function path.isresourcefile(fname)
|
||||
return path.hasextension(fname, ".rc")
|
||||
end
|
||||
|
||||
--
|
||||
-- Returns true if the filename represents a Windows idl file.
|
||||
--
|
||||
|
||||
function path.isidlfile(fname)
|
||||
return path.hasextension(fname, ".idl")
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Returns true if the filename represents a hlsl shader file.
|
||||
--
|
||||
|
||||
function path.ishlslfile(fname)
|
||||
return path.hasextension(fname, ".hlsl")
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Takes a path which is relative to one location and makes it relative
|
||||
-- to another location instead.
|
||||
--
|
||||
|
||||
function path.rebase(p, oldbase, newbase)
|
||||
p = path.getabsolute(path.join(oldbase, p))
|
||||
p = path.getrelative(newbase, p)
|
||||
return p
|
||||
end
|
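-- [Editor's note] Illustrative sketch only, not part of the original source;
-- it assumes "build" and "dist" are sibling directories under the current
-- working directory (kept in a block comment so the module is unchanged).
--[[
assert(path.rebase("../src/hello.c", "build", "dist") == "../src/hello.c")
assert(path.rebase("obj/hello.o", "build", ".") == "build/obj/hello.o")
--..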
||||
|
||||
|
||||
|
||||
--
|
||||
-- Replace the file extension.
|
||||
--
|
||||
|
||||
function path.replaceextension(p, newext)
|
||||
local ext = path.getextension(p)
|
||||
|
||||
if not ext then
|
||||
return p
|
||||
end
|
||||
|
||||
if #newext > 0 and not newext:findlast(".", true) then
|
||||
newext = "."..newext
|
||||
end
|
||||
|
||||
return p:match("^(.*)"..ext.."$")..newext
|
||||
end
|
||||
|
||||
--
|
||||
-- Get the default separator for path.translate
|
||||
--
|
||||
|
||||
function path.getDefaultSeparator()
|
||||
if os.istarget('windows') then
|
||||
return '\\'
|
||||
else
|
||||
return '/'
|
||||
end
|
||||
end
|
432
Src/external_dependencies/openmpt-trunk/include/premake/src/base/premake.lua
vendored
Normal file
@@ -0,0 +1,432 @@
--
|
||||
-- premake.lua
|
||||
-- High-level helper functions for the project exporters.
|
||||
-- Copyright (c) 2002-2015 Jason Perkins and the Premake project
|
||||
--
|
||||
|
||||
local p = premake
|
||||
|
||||
|
||||
|
||||
-- Store captured output text for later testing
|
||||
|
||||
local _captured
|
||||
|
||||
-- The string escaping function.
|
||||
|
||||
local _esc = function(v) return v end
|
||||
|
||||
-- The output settings and defaults
|
||||
|
||||
local _eol = "\n"
|
||||
local _indentString = "\t"
|
||||
local _indentLevel = 0
|
||||
|
||||
-- Set up the global configuration scope. There can be only one.
|
||||
|
||||
global("root")
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Capture and store everything sent through the output stream functions
|
||||
-- premake.w(), premake.x(), and premake.out(). Retrieve the captured
|
||||
-- text using the premake.captured() function.
|
||||
--
|
||||
-- @param fn
|
||||
-- A function to execute. Any output calls made during the execution
|
||||
-- of the function will be captured.
|
||||
-- @return
|
||||
-- The captured output.
|
||||
---
|
||||
|
||||
function premake.capture(fn)
|
||||
-- start a new capture without forgetting the old one
|
||||
local old = _captured
|
||||
_captured = buffered.new()
|
||||
|
||||
-- capture
|
||||
fn()
|
||||
|
||||
-- build the result
|
||||
local captured = p.captured()
|
||||
|
||||
-- free the capture buffer.
|
||||
buffered.close(_captured)
|
||||
|
||||
-- restore the old capture and done
|
||||
_captured = old
|
||||
return captured
|
||||
end
|
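-- [Editor's note] Illustrative usage sketch only, not part of the original
-- source; it assumes a normal premake runtime (kept in a block comment so the
-- module is unchanged).
--[[
local text = premake.capture(function()
	premake.w("line one")
	premake.w("line two")
end)
-- text holds "line one" and "line two", each followed by the current EOL sequence
--..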
||||
|
||||
|
||||
|
||||
--
|
||||
-- Returns the captured text and stops capturing.
|
||||
--
|
||||
|
||||
function premake.captured()
|
||||
if _captured then
|
||||
return buffered.tostring(_captured)
|
||||
else
|
||||
return ""
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Set the output stream end-of-line sequence.
|
||||
--
|
||||
-- @param s
|
||||
-- The string to use to mark line ends, or nil to keep the existing
|
||||
-- EOL sequence.
|
||||
-- @return
|
||||
-- The new EOL sequence.
|
||||
---
|
||||
|
||||
function premake.eol(s)
|
||||
_eol = s or _eol
|
||||
return _eol
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Handle escaping of strings for various outputs.
|
||||
--
|
||||
-- @param value
|
||||
-- If this is a string: escape it and return the new value. If it is an
|
||||
-- array, return a new array of escaped values.
|
||||
-- @return
|
||||
-- If the input was a single string, returns the escaped version. If it
|
||||
-- was an array, returns a corresponding array of escaped strings.
|
||||
---
|
||||
|
||||
function premake.esc(value)
|
||||
if type(value) == "table" then
|
||||
local result = {}
|
||||
local n = #value
|
||||
for i = 1, n do
|
||||
table.insert(result, p.esc(value[i]))
|
||||
end
|
||||
return result
|
||||
end
|
||||
|
||||
return _esc(value or "")
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Set a new string escaping function.
|
||||
--
|
||||
-- @param func
|
||||
-- The new escaping function, which should take a single string argument
|
||||
-- and return the escaped version of that string. If nil, uses a default
|
||||
-- no-op function.
|
||||
---
|
||||
|
||||
function premake.escaper(func)
|
||||
_esc = func
|
||||
if not _esc then
|
||||
_esc = function (value) return value end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Returns a boolean indicating whether the file was modified.
|
||||
-- Open a file for output, and call a function to actually do the writing.
|
||||
-- Used by the actions to generate workspace and project files.
|
||||
--
|
||||
-- @param obj
|
||||
-- A workspace or project object; will be passed to the callback function.
|
||||
-- @param ext
|
||||
-- An optional extension for the generated file, with the leading dot.
|
||||
-- @param callback
|
||||
-- The function responsible for writing the file, should take a workspace
|
||||
-- or project as a parameter.
|
||||
--
|
||||
|
||||
function premake.generate(obj, ext, callback)
|
||||
local output = p.capture(function ()
|
||||
_indentLevel = 0
|
||||
callback(obj)
|
||||
_indentLevel = 0
|
||||
end)
|
||||
|
||||
local fn = p.filename(obj, ext)
|
||||
|
||||
-- make sure output folder exists.
|
||||
local dir = path.getdirectory(fn)
|
||||
local ok, err = os.mkdir(dir)
|
||||
if not ok then
|
||||
error(err, 0)
|
||||
end
|
||||
|
||||
local f, err = os.writefile_ifnotequal(output, fn);
|
||||
|
||||
if (f == 0) then
|
||||
return false -- file not modified
|
||||
elseif (f < 0) then
|
||||
error(err, 0)
|
||||
elseif (f > 0) then
|
||||
printf("Generated %s...", path.getrelative(os.getcwd(), fn))
|
||||
return true -- file modified
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Marks a file as modified without changing its contents
|
||||
--
|
||||
-- @param obj
|
||||
-- A workspace or project object; will be passed to the callback function.
|
||||
-- @param ext
|
||||
-- An optional extension for the generated file, with the leading dot.
|
||||
--
|
||||
|
||||
function premake.touch(obj, ext)
|
||||
local fn = premake.filename(obj, ext)
|
||||
|
||||
-- make sure output folder exists.
|
||||
local dir = path.getdirectory(fn)
|
||||
local ok, err = os.mkdir(dir)
|
||||
if not ok then
|
||||
error(err, 0)
|
||||
end
|
||||
|
||||
local f, err = os.touchfile(fn);
|
||||
|
||||
if (f == 0) then
|
||||
return false -- file marked as modified
|
||||
elseif (f < 0) then
|
||||
error(err, 0)
|
||||
elseif (f > 0) then
|
||||
return true -- file created
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
---
|
||||
-- Returns the full path a file generated from any of the project
|
||||
-- objects (project, workspace, rule).
|
||||
--
|
||||
-- @param obj
|
||||
-- The project object being generated.
|
||||
-- @param ext
|
||||
-- An optional extension for the generated file, with the leading dot.
|
||||
---
|
||||
|
||||
function premake.filename(obj, ext)
|
||||
local fname = obj.location or obj.basedir
|
||||
if ext and not ext:startswith(".") then
|
||||
fname = path.join(fname, ext)
|
||||
else
|
||||
fname = path.join(fname, obj.filename)
|
||||
if ext then
|
||||
fname = fname .. ext
|
||||
end
|
||||
end
|
||||
return path.getabsolute(fname)
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Sets the output indentation parameters.
|
||||
--
|
||||
-- @param s
|
||||
-- The indentation string.
|
||||
-- @param i
|
||||
-- The new indentation level, or nil to reset to zero.
|
||||
---
|
||||
|
||||
function premake.indent(s, i)
|
||||
_indentString = s or "\t"
|
||||
_indentLevel = i or 0
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Write a simple, unformatted string to the output stream, with no indentation
|
||||
-- or end of line sequence.
|
||||
---
|
||||
|
||||
function premake.out(s)
|
||||
if not _captured then
|
||||
io.write(s)
|
||||
else
|
||||
buffered.write(_captured, s)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Write a simple, unformatted string to the output stream, with no indentation,
|
||||
-- and append the current EOL sequence.
|
||||
---
|
||||
|
||||
function premake.outln(s)
|
||||
p.out(s)
|
||||
p.out(_eol or "\n")
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Write a formatted string to the exported file, after decreasing the
|
||||
-- indentation level by one.
|
||||
--
|
||||
-- @param i
|
||||
-- If set to a number, the indentation level will be decreased by
|
||||
-- this amount. If nil, the indentation level is decremented and
|
||||
-- no output is written. Otherwise, pass to premake.w() as the
|
||||
-- formatting string, followed by any additional arguments.
|
||||
---
|
||||
|
||||
function premake.pop(i, ...)
|
||||
if i == nil or type(i) == "number" then
|
||||
_indentLevel = _indentLevel - (i or 1)
|
||||
else
|
||||
_indentLevel = _indentLevel - 1
|
||||
p.w(i, ...)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Write a formatted string to the exported file, and increase the
|
||||
-- indentation level by one.
|
||||
--
|
||||
-- @param i
|
||||
-- If set to a number, the indentation level will be increased by
|
||||
-- this amount. If nil, the indentation level is incremented and
|
||||
-- no output is written. Otherwise, pass to premake.w() as the
|
||||
-- formatting string, followed by any additional arguments.
|
||||
---
|
||||
|
||||
function premake.push(i, ...)
|
||||
if i == nil or type(i) == "number" then
|
||||
_indentLevel = _indentLevel + (i or 1)
|
||||
else
|
||||
p.w(i, ...)
|
||||
_indentLevel = _indentLevel + 1
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Wrap the provided value in double quotes if it contains spaces, or
|
||||
-- if it contains a shell variable of the form $(...).
|
||||
---
|
||||
|
||||
function premake.quoted(value)
|
||||
local q = value:find(" ", 1, true)
|
||||
if not q then
|
||||
q = value:find("$%(.-%)", 1)
|
||||
end
|
||||
if q then
|
||||
value = '"' .. value .. '"'
|
||||
end
|
||||
return value
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Output a UTF-8 BOM to the exported file.
|
||||
--
|
||||
|
||||
function p.utf8()
|
||||
p.out('\239\187\191')
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Write a formatted string to the exported file, at the current
|
||||
-- level of indentation, and appends an end of line sequence.
|
||||
-- This gets called quite a lot, hence the very short name.
|
||||
---
|
||||
|
||||
function premake.w(...)
|
||||
if select("#", ...) > 0 then
|
||||
p.outln(string.rep(_indentString or "\t", _indentLevel) .. string.format(...))
|
||||
else
|
||||
p.outln('');
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Write a formatted string to the exported file, after passing all
|
||||
-- arguments (except for the first, which is the formatting string)
|
||||
-- through premake.esc().
|
||||
---
|
||||
|
||||
function premake.x(msg, ...)
|
||||
local arg = {...}
|
||||
for i = 1, #arg do
|
||||
arg[i] = p.esc(arg[i])
|
||||
end
|
||||
p.w(msg, table.unpack(arg))
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Write an opening XML element for a UTF-8 encoded file. Used by
|
||||
-- several different files for different actions, so makes sense
|
||||
-- to have a common call for it.
|
||||
--
|
||||
-- @param upper
|
||||
-- If true, the encoding is written in uppercase.
|
||||
---
|
||||
|
||||
function premake.xmlUtf8(upper)
|
||||
local encoding = iif(upper, "UTF-8", "utf-8")
|
||||
p.w('<?xml version="1.0" encoding="%s"?>', encoding)
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- These are the output shortcuts that I used before switching to the
|
||||
-- indentation-aware calls above. They are still in use all over the
|
||||
-- place, including lots of community code, so let's keep them around.
|
||||
--
|
||||
-- @param i
|
||||
-- This will either be a printf-style formatting string suitable
|
||||
-- for passing to string.format(), OR an integer number indicating
|
||||
-- the desired level of indentation. If the latter, the formatting
|
||||
-- string should be the next argument in the list.
|
||||
-- @param ...
|
||||
-- The values necessary to fill out the formatting string tokens.
|
||||
--
|
||||
|
||||
function _p(i, ...)
|
||||
if type(i) == "number" then
|
||||
_indentLevel = i
|
||||
p.w(...)
|
||||
else
|
||||
_indentLevel = 0
|
||||
p.w(i, ...)
|
||||
end
|
||||
end
|
||||
|
||||
function _x(i, ...)
|
||||
local arg = {...}
|
||||
for i = 2, #arg do
|
||||
arg[i] = p.esc(arg[i])
|
||||
end
|
||||
_p(i, table.unpack(arg))
|
||||
end
|
563
Src/external_dependencies/openmpt-trunk/include/premake/src/base/project.lua
vendored
Normal file
@@ -0,0 +1,563 @@
---
|
||||
-- project.lua
|
||||
-- Premake project object API
|
||||
-- Author Jason Perkins
|
||||
-- Copyright (c) 2011-2015 Jason Perkins and the Premake project
|
||||
---
|
||||
|
||||
local p = premake
|
||||
p.project = p.api.container("project", p.workspace, { "config" })
|
||||
|
||||
local project = p.project
|
||||
local tree = p.tree
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Alias the old external() call to the new externalproject(), to distinguish
|
||||
-- between it and externalrule().
|
||||
---
|
||||
|
||||
external = externalproject
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Create a new project container instance.
|
||||
---
|
||||
|
||||
function project.new(name)
|
||||
local prj = p.container.new(project, name)
|
||||
prj.uuid = os.uuid(name)
|
||||
|
||||
if p.api.scope.group then
|
||||
prj.group = p.api.scope.group.name
|
||||
else
|
||||
prj.group = ""
|
||||
end
|
||||
|
||||
return prj
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Returns an iterator function for the configuration objects contained by
|
||||
-- the project. Each configuration corresponds to a build configuration/
|
||||
-- platform pair (i.e. "Debug|x86") as specified in the workspace.
|
||||
--
|
||||
-- @param prj
|
||||
-- The project object to query.
|
||||
-- @return
|
||||
-- An iterator function returning configuration objects.
|
||||
--
|
||||
|
||||
function project.eachconfig(prj)
|
||||
local configs = prj._cfglist
|
||||
local count = #configs
|
||||
|
||||
-- Once the configurations are mapped into the workspace I could get
|
||||
-- the same one multiple times. Make sure that doesn't happen.
|
||||
local seen = {}
|
||||
|
||||
local i = 0
|
||||
return function ()
|
||||
i = i + 1
|
||||
if i <= count then
|
||||
local cfg = project.getconfig(prj, configs[i][1], configs[i][2])
|
||||
if not seen[cfg] then
|
||||
seen[cfg] = true
|
||||
return cfg
|
||||
else
|
||||
i = i + 1
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- When an exact match is not available (project.getconfig() returns nil), use
|
||||
-- this function to find the closest alternative.
|
||||
--
|
||||
-- @param prj
|
||||
-- The project object to query.
|
||||
-- @param buildcfg
|
||||
-- The name of the build configuration on which to filter.
|
||||
-- @param platform
|
||||
-- Optional; the name of the platform on which to filter.
|
||||
-- @return
|
||||
-- A configuration object.
|
||||
--
|
||||
|
||||
function project.findClosestMatch(prj, buildcfg, platform)
|
||||
|
||||
-- One or both of buildcfg and platform do not match any of the project
|
||||
-- configurations, otherwise I would have had an exact match. Map them
|
||||
-- separately to apply any partial rules.
|
||||
|
||||
buildcfg = project.mapconfig(prj, buildcfg)[1]
|
||||
platform = project.mapconfig(prj, platform)[1]
|
||||
|
||||
-- Replace missing values with whatever is first in the list
|
||||
|
||||
if not table.contains(prj.configurations, buildcfg) then
|
||||
buildcfg = prj.configurations[1]
|
||||
end
|
||||
|
||||
if not table.contains(prj.platforms, platform) then
|
||||
platform = prj.platforms[1]
|
||||
end
|
||||
|
||||
-- Now I should have a workable pairing
|
||||
|
||||
return project.getconfig(prj, buildcfg, platform)
|
||||
|
||||
end
|
||||
|
||||
|
||||
|
||||
-- Retrieve the project's configuration information for a particular build
|
||||
-- configuration/platform pair.
|
||||
--
|
||||
-- @param prj
|
||||
-- The project object to query.
|
||||
-- @param buildcfg
|
||||
-- The name of the build configuration on which to filter.
|
||||
-- @param platform
|
||||
-- Optional; the name of the platform on which to filter.
|
||||
-- @return
|
||||
-- A configuration object.
|
||||
|
||||
function project.getconfig(prj, buildcfg, platform)
|
||||
-- if no build configuration is specified, return the "root" project
|
||||
-- configurations, which includes all configuration values that
|
||||
-- weren't set with a specific configuration filter
|
||||
if not buildcfg then
|
||||
return prj
|
||||
end
|
||||
|
||||
-- apply any configuration mappings
|
||||
local pairing = project.mapconfig(prj, buildcfg, platform)
|
||||
buildcfg = pairing[1]
|
||||
platform = pairing[2]
|
||||
|
||||
-- look up and return the associated config
|
||||
local key = (buildcfg or "*") .. (platform or "")
|
||||
return prj.configs[key]
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Returns a list of sibling projects on which the specified project depends.
|
||||
-- This is used to list dependencies within a workspace. Must consider all
|
||||
-- configurations because Visual Studio does not support per-config project
|
||||
-- dependencies.
|
||||
--
|
||||
-- @param prj
|
||||
-- The project to query.
|
||||
-- @param mode
|
||||
-- if mode == 'linkOnly', returns only siblings which are linked against (links) and skips siblings which are not (dependson).
|
||||
-- if mode == 'dependOnly' returns only siblings which are depended on (dependson) and skips siblings which are not (links).
|
||||
-- @return
|
||||
-- A list of dependent projects, as an array of project objects.
|
||||
---
|
||||
|
||||
function project.getdependencies(prj, mode)
|
||||
if not prj.dependencies then
|
||||
prj.dependencies = {}
|
||||
end
|
||||
|
||||
local m = mode or 'all'
|
||||
local result = prj.dependencies[m]
|
||||
if result then
|
||||
return result
|
||||
end
|
||||
|
||||
local function add_to_project_list(cfg, depproj, result)
|
||||
local dep = p.workspace.findproject(cfg.workspace, depproj)
|
||||
if dep and not table.contains(result, dep) then
|
||||
table.insert(result, dep)
|
||||
end
|
||||
end
|
||||
|
||||
local linkOnly = m == 'linkOnly'
|
||||
local depsOnly = m == 'dependOnly'
|
||||
|
||||
result = {}
|
||||
for cfg in project.eachconfig(prj) do
|
||||
if not depsOnly then
|
||||
for _, link in ipairs(cfg.links) do
|
||||
if link ~= prj.name then
|
||||
add_to_project_list(cfg, link, result)
|
||||
end
|
||||
end
|
||||
end
|
||||
if not linkOnly then
|
||||
for _, depproj in ipairs(cfg.dependson) do
|
||||
add_to_project_list(cfg, depproj, result)
|
||||
end
|
||||
end
|
||||
end
|
||||
prj.dependencies[m] = result
|
||||
|
||||
return result
|
||||
end
|
||||
|
||||
|
||||
|
||||
--
|
||||
-- Return the first configuration of a project, which is used in some
|
||||
-- actions to generate project-wide defaults.
|
||||
--
|
||||
-- @param prj
|
||||
-- The project object to query.
|
||||
-- @return
|
||||
-- The first configuration in a project, as would be returned by
|
||||
-- eachconfig().
|
||||
--
|
||||
|
||||
function project.getfirstconfig(prj)
|
||||
local iter = project.eachconfig(prj)
|
||||
local first = iter()
|
||||
return first
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Return the relative path from the project to the specified file.
|
||||
--
|
||||
-- @param prj
|
||||
-- The project object to query.
|
||||
-- @param filename
|
||||
-- The file path, or an array of file paths, to convert.
|
||||
-- @return
|
||||
-- The relative path, or array of paths, from the project to the file.
|
||||
--
|
||||
|
||||
function project.getrelative(prj, filename)
|
||||
if type(filename) == "table" then
|
||||
local result = {}
|
||||
for i, name in ipairs(filename) do
|
||||
result[i] = project.getrelative(prj, name)
|
||||
end
|
||||
return result
|
||||
else
|
||||
if filename then
|
||||
local result = filename
|
||||
if path.hasdeferredjoin(result) then
|
||||
result = path.resolvedeferredjoin(result)
|
||||
end
|
||||
return path.getrelative(prj.location, result)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Create a tree from a project's list of source files.
|
||||
--
|
||||
-- @param prj
|
||||
-- The project to query.
|
||||
-- @param sorter
|
||||
-- An optional comparator function for the sorting pass.
|
||||
-- @return
|
||||
-- A tree object containing the source file hierarchy. Leaf nodes,
|
||||
-- representing the individual files, are file configuration
|
||||
-- objects.
|
||||
--
|
||||
|
||||
function project.getsourcetree(prj, sorter)
|
||||
|
||||
if prj._.sourcetree then
|
||||
return prj._.sourcetree
|
||||
end
|
||||
|
||||
local tr = tree.new(prj.name)
|
||||
|
||||
table.foreachi(prj._.files, function(fcfg)
|
||||
-- if the file is a generated file, we add those in a second pass.
|
||||
if fcfg.generated then
|
||||
return;
|
||||
end
|
||||
|
||||
-- The tree represents the logical source code tree to be displayed
|
||||
-- in the IDE, not the physical organization of the file system. So
|
||||
-- virtual paths are used when adding nodes.
|
||||
|
||||
-- If the project script specifies a virtual path for a file, disable
|
||||
-- the logic that could trim out empty root nodes from that path. If
|
||||
-- the script writer wants an empty root node they should get it.
|
||||
|
||||
local flags
|
||||
if fcfg.vpath ~= fcfg.relpath then
|
||||
flags = { trim = false }
|
||||
end
|
||||
|
||||
-- Virtual paths can overlap, potentially putting files with the same
|
||||
-- name in the same folder, even though they have different paths on
|
||||
-- the underlying filesystem. The tree.add() call won't overwrite
|
||||
-- existing nodes, so provide the extra logic here. Start by getting
|
||||
-- the parent folder node, creating it if necessary.
|
||||
|
||||
local parent = tree.add(tr, path.getdirectory(fcfg.vpath), flags)
|
||||
local node = tree.insert(parent, tree.new(path.getname(fcfg.vpath)))
|
||||
|
||||
-- Pass through value fetches to the file configuration
|
||||
setmetatable(node, { __index = fcfg })
|
||||
end)
|
||||
|
||||
|
||||
table.foreachi(prj._.files, function(fcfg)
|
||||
-- if the file is not a generated file, we already added them
|
||||
if not fcfg.generated then
|
||||
return;
|
||||
end
|
||||
|
||||
local parent = tree.add(tr, path.getdirectory(fcfg.dependsOn.vpath))
|
||||
local node = tree.insert(parent, tree.new(path.getname(fcfg.vpath)))
|
||||
|
||||
-- Pass through value fetches to the file configuration
|
||||
setmetatable(node, { __index = fcfg })
|
||||
end)
|
||||
|
||||
tree.trimroot(tr)
|
||||
tree.sort(tr, sorter)
|
||||
|
||||
prj._.sourcetree = tr
|
||||
return tr
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Given a source file path, return a corresponding virtual path based on
|
||||
-- the vpath entries in the project. If no matching vpath entry is found,
|
||||
-- the original path is returned.
|
||||
--
|
||||
|
||||
function project.getvpath(prj, abspath)
|
||||
-- If there is no match, the result is the original filename
|
||||
local vpath = abspath
|
||||
|
||||
-- The file's name must be maintained in the resulting path; use these
|
||||
-- to make sure I don't cut off too much
|
||||
|
||||
local fname = path.getname(abspath)
|
||||
local max = abspath:len() - fname:len()
|
||||
|
||||
-- Look for matching patterns. Virtual paths are stored as an array
|
||||
-- for tables, each table continuing the path key, which looks up the
|
||||
-- array of paths with should match against that path.
|
||||
|
||||
for _, vpaths in ipairs(prj.vpaths) do
|
||||
for replacement, patterns in pairs(vpaths) do
|
||||
for _, pattern in ipairs(patterns) do
|
||||
local i = abspath:find(path.wildcards(pattern))
|
||||
if i == 1 then
|
||||
|
||||
-- Trim out the part of the name that matched the pattern; what's
|
||||
-- left is the part that gets appended to the replacement to make
|
||||
-- the virtual path. So a pattern like "src/**.h" matching the
|
||||
-- file src/include/hello.h, I want to trim out the src/ part,
|
||||
-- leaving include/hello.h.
|
||||
|
||||
-- Find out where the wildcard appears in the match. If there is
|
||||
-- no wildcard, the match includes the entire pattern
|
||||
|
||||
i = pattern:find("*", 1, true) or (pattern:len() + 1)
|
||||
|
||||
-- Trim, taking care to keep the actual file name intact.
|
||||
|
||||
local leaf
|
||||
if i < max then
|
||||
leaf = abspath:sub(i)
|
||||
else
|
||||
leaf = fname
|
||||
end
|
||||
|
||||
if leaf:startswith("/") then
|
||||
leaf = leaf:sub(2)
|
||||
end
|
||||
|
||||
-- check for (and remove) stars in the replacement pattern.
|
||||
-- If there are none, then trim all path info from the leaf
|
||||
-- and use just the filename in the replacement (stars should
|
||||
-- really only appear at the end; I'm cheating here)
|
||||
|
||||
local stem = ""
|
||||
if replacement:len() > 0 then
|
||||
stem, stars = replacement:gsub("%*", "")
|
||||
if stars == 0 then
|
||||
leaf = path.getname(leaf)
|
||||
end
|
||||
else
|
||||
leaf = path.getname(leaf)
|
||||
end
|
||||
|
||||
vpath = path.join(stem, leaf)
|
||||
return vpath
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return vpath
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Determines if project contains a configuration meeting certain criteria.
|
||||
--
|
||||
-- @param prj
|
||||
-- The project to query.
|
||||
-- @param func
|
||||
-- A test function. Takes a project configuration as an argument and
|
||||
-- returns a boolean result of the test.
|
||||
-- @return
|
||||
-- True if the test function returned true.
|
||||
--
|
||||
|
||||
function project.hasConfig(prj, func)
|
||||
for cfg in project.eachconfig(prj) do
|
||||
if func(cfg) then
|
||||
return true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Determines if a project contains a particular source code file.
|
||||
--
|
||||
-- @param prj
|
||||
-- The project to query.
|
||||
-- @param filename
|
||||
-- The absolute path to the source code file being checked.
|
||||
-- @return
|
||||
-- True if the file belongs to the project, in any configuration.
|
||||
--
|
||||
|
||||
function project.hasfile(prj, filename)
|
||||
return (prj._.files[filename] ~= nil)
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Returns true if the project uses a .NET language.
|
||||
--
|
||||
|
||||
function project.isdotnet(prj)
|
||||
return
|
||||
p.languages.iscsharp(prj.language) or
|
||||
p.languages.isfsharp(prj.language)
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Returns true if the project uses a C# language.
|
||||
--
|
||||
|
||||
function project.iscsharp(prj)
|
||||
return p.languages.iscsharp(prj.language)
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Returns true if the project uses a F# language.
|
||||
--
|
||||
|
||||
function project.isfsharp(prj)
|
||||
return p.languages.isfsharp(prj.language)
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Returns true if the project uses a cpp language.
|
||||
--
|
||||
|
||||
function project.isc(prj)
|
||||
return p.languages.isc(prj.language)
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Returns true if the project uses a cpp language.
|
||||
--
|
||||
|
||||
function project.iscpp(prj)
|
||||
return p.languages.iscpp(prj.language)
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Returns true if the project uses any 'native' language,
|
||||
-- which is basically anything other than .NET at this point.
|
||||
-- modules like the dlang should overload this to add 'project.isd(prj)' to it.
|
||||
--
|
||||
function project.isnative(prj)
|
||||
return project.isc(prj) or project.iscpp(prj)
|
||||
end
|
||||
|
||||
|
||||
--
|
||||
-- Given a build config/platform pairing, applies any project configuration maps
|
||||
-- and returns a new (or the same) pairing.
|
||||
--
|
||||
-- TODO: I think this could be made much simpler by building a string pattern
|
||||
-- like :part1:part2: and then doing string comparisons, instead of trying to
|
||||
-- iterate over variable number of table elements.
|
||||
--
|
||||
|
||||
function project.mapconfig(prj, buildcfg, platform)
|
||||
local pairing = { buildcfg, platform }
|
||||
|
||||
local testpattern = function(pattern, pairing, i)
|
||||
local j = 1
|
||||
while i <= #pairing and j <= #pattern do
|
||||
local wd = path.wildcards(pattern[j])
|
||||
if pairing[i]:match(wd) ~= pairing[i] then
|
||||
return false
|
||||
end
|
||||
i = i + 1
|
||||
j = j + 1
|
||||
end
|
||||
return true
|
||||
end
|
||||
|
||||
local maps = prj.configmap or {}
|
||||
for mi = 1, #maps do
|
||||
for pattern, replacements in pairs(maps[mi]) do
|
||||
if type(pattern) ~= "table" then
|
||||
pattern = { pattern }
|
||||
end
|
||||
|
||||
-- does this pattern match any part of the pair? If so,
|
||||
-- replace it with the corresponding values
|
||||
for i = 1, #pairing do
|
||||
if testpattern(pattern, pairing, i) then
|
||||
if #pattern == 1 and #replacements == 1 then
|
||||
pairing[i] = replacements[1]
|
||||
else
|
||||
pairing = { replacements[1], replacements[2] }
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return pairing
|
||||
end
|
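-- [Editor's note] Illustrative sketch only, not part of the original source.
-- Assuming a project script that declares configmap { ["Diagnostic"] = "Debug" },
-- the mapping applies like this (kept in a block comment so the module is
-- unchanged):
--[[
local mapped = project.mapconfig(prj, "Diagnostic", "x86")
-- mapped[1] == "Debug", mapped[2] == "x86"
local unmapped = project.mapconfig(prj, "Release", "x86")
-- unmapped[1] == "Release" (no map entry matches, so the pairing is returned as-is)
--..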
||||
|
||||
|
||||
--
|
||||
-- Given a project, returns requested min and max system versions.
|
||||
--
|
||||
|
||||
function project.systemversion(prj)
|
||||
if prj.systemversion ~= nil then
|
||||
local values = string.explode(prj.systemversion, ":", true)
|
||||
return values[1], values[2]
|
||||
end
|
||||
end
|
242
Src/external_dependencies/openmpt-trunk/include/premake/src/base/rule.lua
vendored
Normal file
@@ -0,0 +1,242 @@
---
|
||||
-- base/rule.lua
|
||||
-- Defines rule sets for generated custom rule files.
|
||||
-- Copyright (c) 2014 Jason Perkins and the Premake project
|
||||
---
|
||||
|
||||
local p = premake
|
||||
p.rule = p.api.container("rule", p.global)
|
||||
|
||||
local rule = p.rule
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Create a new rule container instance.
|
||||
---
|
||||
|
||||
function rule.new(name)
|
||||
local self = p.container.new(rule, name)
|
||||
|
||||
-- create a variable setting function. Do a version with lowercased
|
||||
-- first letter(s) to match Premake's naming style for other calls
|
||||
|
||||
_G[name .. "Vars"] = function(vars)
|
||||
rule.setVars(self, vars)
|
||||
end
|
||||
|
||||
local lowerName = name:gsub("^%u+", string.lower)
|
||||
_G[lowerName .. "Vars"] = _G[name .. "Vars"]
|
||||
|
||||
return self
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Enumerate the property definitions for a rule.
|
||||
---
|
||||
|
||||
function rule.eachProperty(self)
|
||||
local props = self.propertydefinition
|
||||
local i = 0
|
||||
return function ()
|
||||
i = i + 1
|
||||
if i <= #props then
|
||||
return props[i]
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Find a property definition by its name.
|
||||
--
|
||||
-- @param name
|
||||
-- The property name.
|
||||
-- @returns
|
||||
-- The property definition if found, nil otherwise.
|
||||
---
|
||||
|
||||
function rule.getProperty(self, name)
|
||||
local props = self.propertydefinition
|
||||
for i = 1, #props do
|
||||
local prop = props[i]
|
||||
if prop.name == name then
|
||||
return prop
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Find the field definition for one of this rule's properties. This field
|
||||
-- can then be used with the api.* functions to manipulate the property's
|
||||
-- values in the current configuration scope.
|
||||
--
|
||||
-- @param prop
|
||||
-- The property definition.
|
||||
-- @return
|
||||
-- The field definition for the property; this will be created if it
|
||||
-- does not already exist.
|
||||
---
|
||||
|
||||
function rule.getPropertyField(self, prop)
|
||||
if prop._field then
|
||||
return prop._field
|
||||
end
|
||||
|
||||
local kind = prop.kind or "string"
|
||||
if kind == "list" then
|
||||
kind = "list:string"
|
||||
end
|
||||
|
||||
local fld = p.field.new {
|
||||
name = "_rule_" .. self.name .. "_" .. prop.name,
|
||||
scope = "config",
|
||||
kind = kind,
|
||||
tokens = true,
|
||||
}
|
||||
|
||||
prop._field = fld
|
||||
return fld
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- Set one or more rule variables in the current configuration scope.
|
||||
--
|
||||
-- @param vars
|
||||
-- A key-value list of variables to set and their corresponding values.
|
||||
---
|
||||
|
||||
function rule.setVars(self, vars)
|
||||
for key, value in pairs(vars) do
|
||||
local prop = rule.getProperty(self, key)
|
||||
if not prop then
|
||||
error (string.format("rule '%s' does not have property '%s'", self.name, key))
|
||||
end
|
||||
|
||||
local fld = rule.getPropertyField(self, prop)
|
||||
p.api.storeField(fld, value)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
---
|
||||
-- prepare an environment with the rule properties as global tokens,
|
||||
-- according to the format specified.
|
||||
--
|
||||
-- @param environ
|
||||
-- The environment table to fill up
|
||||
-- @param format
|
||||
-- The formatting to be used, ie "[%s]".
|
||||
---
|
||||
function rule.createEnvironment(self, format)
|
||||
local environ = {}
|
||||
for _, def in ipairs(self.propertydefinition) do
|
||||
environ[def.name] = string.format(format, def.name)
|
||||
end
|
||||
return environ
|
||||
end
|
||||
|
||||
---
|
||||
-- prepare a table of pathVars with the rule properties as global tokens,
|
||||
-- according to the format specified.
|
||||
--
|
||||
-- @param pathVars
|
||||
-- The pathVars table to fill up
|
||||
-- @param format
|
||||
-- The formatting to be used, ie "%%(%s)".
|
||||
---
|
||||
|
||||
function rule.preparePathVars(self, pathVars, format)
|
||||
for _, def in ipairs(self.propertydefinition) do
|
||||
pathVars[def.name] = { absolute = true, token = string.format(format, def.name) }
|
||||
end
|
||||
end
|
||||
|
||||
function rule.createPathVars(self, format)
|
||||
local pathVars = {}
|
||||
rule.preparePathVars(self, pathVars, format)
|
||||
return pathVars
|
||||
end
|
||||
|
||||
function rule.prepareEnvironment(self, environ, cfg)
|
||||
local function path(cfg, value)
|
||||
cfg = cfg.project or cfg
|
||||
local dirs = path.translate(project.getrelative(cfg, value))
|
||||
|
||||
if type(dirs) == 'table' then
|
||||
dirs = table.filterempty(dirs)
|
||||
end
|
||||
|
||||
return dirs
|
||||
end
|
||||
|
||||
local function expandRuleString(prop, value)
|
||||
-- list
|
||||
if type(value) == "table" then
|
||||
if #value > 0 then
|
||||
local switch = prop.switch or ""
|
||||
if prop.separator then
|
||||
return switch .. table.concat(value, prop.separator)
|
||||
else
|
||||
return switch .. table.concat(value, " " .. switch)
|
||||
end
|
||||
else
|
||||
return nil
|
||||
end
|
||||
end
|
||||
|
||||
-- bool just emits the switch
|
||||
if prop.switch and type(value) == "boolean" then
|
||||
if value then
|
||||
return prop.switch
|
||||
else
|
||||
return nil
|
||||
end
|
||||
end
|
||||
|
||||
-- enum
|
||||
if prop.values then
|
||||
local switch = prop.switch or {}
|
||||
value = table.findKeyByValue(prop.values, value)
|
||||
if value == nil then
|
||||
return nil
|
||||
end
|
||||
return switch[value]
|
||||
end
|
||||
|
||||
-- primitive
|
||||
local switch = prop.switch or ""
|
||||
value = tostring(value)
|
||||
if #value > 0 then
|
||||
return switch .. value
|
||||
else
|
||||
return nil
|
||||
end
|
||||
end
|
||||
|
||||
for _, prop in ipairs(self.propertydefinition) do
|
||||
local fld = p.rule.getPropertyField(self, prop)
|
||||
local value = cfg[fld.name]
|
||||
if value ~= nil then
|
||||
|
||||
if fld.kind == "path" then
|
||||
value = path(cfg, value)
|
||||
elseif fld.kind == "list:path" then
|
||||
value = path(cfg, value)
|
||||
end
|
||||
|
||||
value = expandRuleString(prop, value)
|
||||
if value ~= nil and #value > 0 then
|
||||
environ[prop.name] = p.esc(value)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
209
Src/external_dependencies/openmpt-trunk/include/premake/src/base/semver.lua
vendored
Normal file
@@ -0,0 +1,209 @@
local semver = {
|
||||
_VERSION = '1.2.1',
|
||||
_DESCRIPTION = 'semver for Lua',
|
||||
_URL = 'https://github.com/kikito/semver.lua',
|
||||
_LICENSE = [[
|
||||
MIT LICENSE
|
||||
|
||||
Copyright (c) 2015 Enrique García Cota
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a
|
||||
copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included
|
||||
in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
]]
|
||||
}
|
||||
|
||||
local function checkPositiveInteger(number, name)
|
||||
assert(number >= 0, name .. ' must be a valid positive number')
|
||||
assert(math.floor(number) == number, name .. ' must be an integer')
|
||||
end
|
||||
|
||||
local function present(value)
|
||||
return value and value ~= ''
|
||||
end
|
||||
|
||||
-- splitByDot("a.bbc.d") == {"a", "bbc", "d"}
|
||||
local function splitByDot(str)
|
||||
str = str or ""
|
||||
local t, count = {}, 0
|
||||
str:gsub("([^%.]+)", function(c)
|
||||
count = count + 1
|
||||
t[count] = c
|
||||
end)
|
||||
return t
|
||||
end
|
||||
|
||||
local function parsePrereleaseAndBuildWithSign(str)
|
||||
local prereleaseWithSign, buildWithSign = str:match("^(-[^+]+)(+.+)$")
|
||||
if not (prereleaseWithSign and buildWithSign) then
|
||||
prereleaseWithSign = str:match("^(-.+)$")
|
||||
buildWithSign = str:match("^(+.+)$")
|
||||
end
|
||||
assert(prereleaseWithSign or buildWithSign, ("The parameter %q must begin with + or - to denote a prerelease or a build"):format(str))
|
||||
return prereleaseWithSign, buildWithSign
|
||||
end
|
||||
|
||||
local function parsePrerelease(prereleaseWithSign)
|
||||
if prereleaseWithSign then
|
||||
local prerelease = prereleaseWithSign:match("^-(%w[%.%w-]*)$")
|
||||
assert(prerelease, ("The prerelease %q is not a slash followed by alphanumerics, dots and slashes"):format(prereleaseWithSign))
|
||||
return prerelease
|
||||
end
|
||||
end
|
||||
|
||||
local function parseBuild(buildWithSign)
|
||||
if buildWithSign then
|
||||
local build = buildWithSign:match("^%+(%w[%.%w-]*)$")
|
||||
assert(build, ("The build %q is not a + sign followed by alphanumerics, dots and slashes"):format(buildWithSign))
|
||||
return build
|
||||
end
|
||||
end
|
||||
|
||||
local function parsePrereleaseAndBuild(str)
|
||||
if not present(str) then return nil, nil end
|
||||
|
||||
local prereleaseWithSign, buildWithSign = parsePrereleaseAndBuildWithSign(str)
|
||||
|
||||
local prerelease = parsePrerelease(prereleaseWithSign)
|
||||
local build = parseBuild(buildWithSign)
|
||||
|
||||
return prerelease, build
|
||||
end
|
||||
|
||||
local function parseVersion(str)
|
||||
local sMajor, sMinor, sPatch, sPrereleaseAndBuild = str:match("^(%d+)%.?(%d*)%.?(%d*)(.-)$")
|
||||
assert(type(sMajor) == 'string', ("Could not extract version number(s) from %q"):format(str))
|
||||
local major, minor, patch = tonumber(sMajor), tonumber(sMinor), tonumber(sPatch)
|
||||
local prerelease, build = parsePrereleaseAndBuild(sPrereleaseAndBuild)
|
||||
return major, minor, patch, prerelease, build
|
||||
end
|
||||

-- return 0 if a == b, -1 if a < b, and 1 if a > b
local function compare(a,b)
    return a == b and 0 or a < b and -1 or 1
end

local function compareIds(myId, otherId)
    if myId == otherId then return 0
    elseif not myId then return -1
    elseif not otherId then return 1
    end

    local selfNumber, otherNumber = tonumber(myId), tonumber(otherId)

    if selfNumber and otherNumber then -- numerical comparison
        return compare(selfNumber, otherNumber)
    -- numericals are always smaller than alphanums
    elseif selfNumber then
        return -1
    elseif otherNumber then
        return 1
    else
        return compare(myId, otherId) -- alphanumerical comparison
    end
end

local function smallerIdList(myIds, otherIds)
    local myLength = #myIds
    local comparison

    for i=1, myLength do
        comparison = compareIds(myIds[i], otherIds[i])
        if comparison ~= 0 then
            return comparison == -1
        end
        -- if comparison == 0, continue loop
    end

    return myLength < #otherIds
end

local function smallerPrerelease(mine, other)
    if mine == other or not mine then return false
    elseif not other then return true
    end

    return smallerIdList(splitByDot(mine), splitByDot(other))
end

local methods = {}

function methods:nextMajor()
    return semver(self.major + 1, 0, 0)
end
function methods:nextMinor()
    return semver(self.major, self.minor + 1, 0)
end
function methods:nextPatch()
    return semver(self.major, self.minor, self.patch + 1)
end

local mt = { __index = methods }
function mt:__eq(other)
    return self.major == other.major and
           self.minor == other.minor and
           self.patch == other.patch and
           self.prerelease == other.prerelease
    -- notice that build is ignored for precedence in semver 2.0.0
end
function mt:__lt(other)
    if self.major ~= other.major then return self.major < other.major end
    if self.minor ~= other.minor then return self.minor < other.minor end
    if self.patch ~= other.patch then return self.patch < other.patch end
    return smallerPrerelease(self.prerelease, other.prerelease)
    -- notice that build is ignored for precedence in semver 2.0.0
end
-- This works like the "pessimistic operator" in Rubygems.
-- if a and b are versions, a ^ b means "b is backwards-compatible with a"
-- in other words, "it's safe to upgrade from a to b"
function mt:__pow(other)
    if self.major == 0 then
        return self == other
    end
    return self.major == other.major and
           self.minor <= other.minor
end
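-- Usage sketch (illustrative only, not part of the vendored file):
--   semver("1.2.0") ^ semver("1.5.3")  --> true  (safe upgrade within major version 1)
--   semver("1.2.0") ^ semver("2.0.0")  --> false (a major bump may break compatibility)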
function mt:__tostring()
    local buffer = { ("%d.%d.%d"):format(self.major, self.minor, self.patch) }
    if self.prerelease then table.insert(buffer, "-" .. self.prerelease) end
    if self.build then table.insert(buffer, "+" .. self.build) end
    return table.concat(buffer)
end

local function new(major, minor, patch, prerelease, build)
    assert(major, "At least one parameter is needed")

    if type(major) == 'string' then
        major,minor,patch,prerelease,build = parseVersion(major)
    end
    patch = patch or 0
    minor = minor or 0

    checkPositiveInteger(major, "major")
    checkPositiveInteger(minor, "minor")
    checkPositiveInteger(patch, "patch")

    local result = {major=major, minor=minor, patch=patch, prerelease=prerelease, build=build}
    return setmetatable(result, mt)
end

setmetatable(semver, { __call = function(_, ...) return new(...) end })
semver._VERSION = semver(semver._VERSION)

return semver
103
Src/external_dependencies/openmpt-trunk/include/premake/src/base/string.lua
vendored
Normal file
103
Src/external_dependencies/openmpt-trunk/include/premake/src/base/string.lua
vendored
Normal file
|
@ -0,0 +1,103 @@
|
|||
--
-- string.lua
-- Additions to Lua's built-in string functions.
-- Copyright (c) 2002-2013 Jason Perkins and the Premake project
--

--
-- Capitalize the first letter of the string.
--

function string.capitalized(self)
    return self:gsub("^%l", string.upper)
end

--
-- Returns true if the string contains the specified text; the search is
-- performed as plain text, not as a Lua pattern.
--

function string.contains(s, match)
    return string.find(s, match, 1, true) ~= nil
end

--
-- Returns an array of strings, each of which is a substring of s
-- formed by splitting on boundaries formed by `pattern`.
--

function string.explode(s, pattern, plain, maxTokens)
    if (pattern == '') then return false end
    local pos = 0
    local arr = { }
    for st,sp in function() return s:find(pattern, pos, plain) end do
        table.insert(arr, s:sub(pos, st-1))
        pos = sp + 1
        if maxTokens ~= nil and maxTokens > 0 then
            maxTokens = maxTokens - 1
            if maxTokens == 0 then
                break
            end
        end
    end
    table.insert(arr, s:sub(pos))
    return arr
end
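-- Usage sketch (illustrative only, not part of the vendored file):
--   string.explode("a/b/c", "/", true)     --> { "a", "b", "c" }
--   string.explode("a/b/c", "/", true, 1)  --> { "a", "b/c" }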

--
-- Find the last instance of a pattern in a string.
--

function string.findlast(s, pattern, plain)
    local curr = 0
    repeat
        local next = s:find(pattern, curr + 1, plain)
        if (next) then curr = next end
    until (not next)
    if (curr > 0) then
        return curr
    end
end

--
-- Returns the number of lines of text contained by the string.
--

function string.lines(s)
    local trailing, n = s:gsub('.-\n', '')
    if #trailing > 0 then
        n = n + 1
    end
    return n
end
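-- Usage sketch (illustrative only, not part of the vendored file):
--   string.lines("one\ntwo\nthree")  --> 3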

---
-- Return a plural version of a string.
---

function string:plural()
    if self:endswith("y") then
        return self:sub(1, #self - 1) .. "ies"
    else
        return self .. "s"
    end
end

---
-- Returns the string escaped for Lua patterns.
---

function string.escapepattern(s)
    return s:gsub("[%(%)%.%%%+%-%*%?%[%]%^%$]", "%%%0")
end
602
Src/external_dependencies/openmpt-trunk/include/premake/src/base/table.lua
vendored
Normal file
602
Src/external_dependencies/openmpt-trunk/include/premake/src/base/table.lua
vendored
Normal file
|
@ -0,0 +1,602 @@
|
|||
--
-- table.lua
-- Additions to Lua's built-in table functions.
-- Copyright (c) 2002-2013 Jason Perkins and the Premake project
--

--
-- Make a copy of the indexed elements of the table.
--

function table.arraycopy(object)
    local result = {}
    for i, value in ipairs(object) do
        result[i] = value
    end
    return result
end

--
-- Returns true if the table contains the specified value.
--

function table.contains(t, value)
    for _,v in pairs(t) do
        if (v == value) then
            return true
        end
    end
    return false
end

--
-- Make a shallow copy of a table
--

function table.shallowcopy(object)
    local copy = {}
    for k, v in pairs(object) do
        copy[k] = v
    end
    return copy
end

--
-- Make a complete copy of a table, including any child tables it contains.
--

function table.deepcopy(object)
    -- keep track of already seen objects to avoid loops
    local seen = {}

    local function copy(object)
        if type(object) ~= "table" then
            return object
        elseif seen[object] then
            return seen[object]
        end

        local clone = {}
        seen[object] = clone
        for key, value in pairs(object) do
            clone[key] = copy(value)
        end

        setmetatable(clone, getmetatable(object))
        return clone
    end

    return copy(object)
end

--
-- Enumerates an array of objects and returns a new table containing
-- only the value of one particular field.
--

function table.extract(arr, fname)
    local result = { }
    for _,v in ipairs(arr) do
        table.insert(result, v[fname])
    end
    return result
end

--
-- Enumerates an array of objects and returns a new table containing
-- only the values satisfying the given predicate.
--

function table.filter(arr, fn)
    local result = { }
    table.foreachi(arr, function(val)
        if fn(val) then
            table.insert(result, val)
        end
    end)
    return result
end

--
-- Flattens a hierarchy of tables into a single array containing all
-- of the values.
--

function table.flatten(arr)
    local result = {}

    local function flatten(arr)
        local n = #arr
        for i = 1, n do
            local v = arr[i]
            if type(v) == "table" then
                flatten(v)
            elseif v then
                table.insert(result, v)
            end
        end
    end

    flatten(arr)
    return result
end
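-- Usage sketch (illustrative only, not part of the vendored file):
--   table.flatten({ "a", { "b", { "c" } }, "d" })  --> { "a", "b", "c", "d" }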

--
-- Walk the elements of an array and call the specified function
-- for each non-nil element. This works around a "feature" of the
-- ipairs() function that stops iteration at the first nil.
--
-- @param arr
--    The array to iterate.
-- @param func
--    The function to call. The value (not the index) will be passed
--    as the only argument.
--

function table.foreachi(arr, func)
    if arr then
        local n = #arr
        for i = 1, n do
            local v = arr[i]
            if v then
                func(v, i)
            end
        end
    end
end

--
-- Merge two lists into an array of objects, containing pairs
-- of values, one from each list.
--

function table.fold(list1, list2)
    local result = {}
    for _, item1 in ipairs(list1 or {}) do
        if list2 and #list2 > 0 then
            for _, item2 in ipairs(list2) do
                table.insert(result, { item1, item2 })
            end
        else
            table.insert(result, { item1 })
        end
    end
    return result
end
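-- Usage sketch (illustrative only, not part of the vendored file):
--   table.fold({ "a", "b" }, { 1, 2 })
--   --> { { "a", 1 }, { "a", 2 }, { "b", 1 }, { "b", 2 } }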

--
-- Merges an array of items into a string.
--

function table.implode(arr, before, after, between)
    local result = ""
    for _,v in ipairs(arr) do
        if (result ~= "" and between) then
            result = result .. between
        end
        result = result .. before .. v .. after
    end
    return result
end

--
-- Looks for an object within an array. Returns its index if found,
-- or nil if the object could not be found.
--

function table.indexof(tbl, obj)
    for k, v in ipairs(tbl) do
        if v == obj then
            return k
        end
    end
end

--
-- Looks for an object within a table. Returns the key if found,
-- or nil if the object could not be found.
--

function table.findKeyByValue(tbl, obj)
    for k, v in pairs(tbl) do
        if v == obj then
            return k
        end
    end
end

---
-- Insert a new value into a table in the position after the specified
-- existing value. If the specified value does not exist in the table,
-- the new value is appended to the end of the table.
--
-- @param tbl
--    The table in which to insert.
-- @param after
--    The existing value to insert after.
-- @param value
--    The new value to insert.
--

function table.insertafter(tbl, after, value)
    local i = table.indexof(tbl, after)
    if i then
        table.insert(tbl, i + 1, value)
    else
        table.insert(tbl, value)
    end
end

--
-- Inserts a value or array of values into a table. If the value is
-- itself a table, its contents are enumerated and added instead. So
-- these inputs give these outputs:
--
--   "x" -> { "x" }
--   { "x", "y" } -> { "x", "y" }
--   { "x", { "y" }} -> { "x", "y" }
--

function table.insertflat(tbl, values)
    if values == nil then
        return
    elseif type(values) == "table" then
        for _, value in ipairs(values) do
            table.insertflat(tbl, value)
        end
    else
        table.insert(tbl, values)
    end
    return tbl
end

--
-- Inserts a value into a table as both a list item and a key-value pair.
-- Useful for set operations. Returns false if the value already exists, true otherwise.
--

function table.insertkeyed(tbl, pos, value)
    if value == nil then
        value = pos
        pos = #tbl + 1
    end

    if tbl[value] ~= nil then
        return false
    end

    table.insert(tbl, pos, value)
    tbl[value] = value
    return true
end

--
-- Inserts a value into a table in sorted order. Assumes that the
-- table is already sorted according to the sort function. If fn is
-- nil, the table is sorted according to the < operator.
--

function table.insertsorted(tbl, value, fn)
    if value == nil then
        return
    else
        fn = fn or function(a, b) return a < b end

        local minindex = 1
        local maxindex = #tbl + 1
        while minindex < maxindex do
            local index = minindex + ((maxindex - minindex) >> 1)
            local test = tbl[index]
            if fn(value, test) then
                maxindex = index
            else
                minindex = index + 1
                if not fn(test, value) then
                    break
                end
            end
        end

        table.insert(tbl, minindex, value)
    end

    return tbl
end
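-- Usage sketch (illustrative only, not part of the vendored file). The binary
-- search above uses the >> operator, so it assumes Lua 5.3 or later:
--   local t = { 1, 3, 7 }
--   table.insertsorted(t, 5)  -- t is now { 1, 3, 5, 7 }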

--
-- Returns true if the table is empty, and contains no indexed or keyed values.
--

function table.isempty(t)
    return next(t) == nil
end

--
-- Adds the values from one array to the end of another and
-- returns the result.
--

function table.join(...)
    local result = { }
    local arg = {...}
    for _,t in ipairs(arg) do
        if type(t) == "table" then
            for _,v in ipairs(t) do
                table.insert(result, v)
            end
        else
            table.insert(result, t)
        end
    end
    return result
end

--
-- Return a list of all keys used in a table.
--

function table.keys(tbl)
    local keys = {}
    for k, _ in pairs(tbl) do
        table.insert(keys, k)
    end
    return keys
end

--
-- Adds the key-value associations from one table into another
-- and returns the resulting merged table.
--

function table.merge(...)
    local result = {}
    local arg = {...}
    for _,t in ipairs(arg) do

        if type(t) == "table" then
            for k,v in pairs(t) do
                if type(result[k]) == "table" and type(v) == "table" then
                    result[k] = table.merge(result[k], v)
                else
                    result[k] = v
                end
            end

        else
            error("invalid value")
        end
    end

    return result
end

---
-- Replace all instances of `value` with `replacement` in an array. Array
-- elements are modified in place.
--
-- @param value
--    The value to be replaced.
-- @param replacement
--    The new value.
---

function table.replace(self, value, replacement)
    for i = 1, #self do
        if self[i] == value then
            self[i] = replacement
        end
    end
end

--
-- Translates the values contained in array, using the specified
-- translation table, and returns the results in a new array.
--

function table.translate(arr, translation)
    if not translation then return {} end

    local result = {}
    for i = 1, #arr do
        local tvalue
        if type(translation) == "function" then
            tvalue = translation(arr[i])
        else
            tvalue = translation[arr[i]]
        end
        if (tvalue) then
            table.insert(result, tvalue)
        end
    end
    return result
end
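-- Usage sketch (illustrative only, not part of the vendored file):
--   table.translate({ "a", "b", "c" }, { a = 1, c = 3 })         --> { 1, 3 }
--   table.translate({ 1, 2, 3 }, function(v) return v * 10 end)  --> { 10, 20, 30 }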

--
-- Dumps a table to a string
--
function table.tostring(tab, recurse, indent)
    local res = ''

    if not indent then
        indent = 0
    end

    local format_value = function(k, v, i)
        local formatting = string.rep("\t", i)

        if k then
            if type(k) == "table" then
                k = '[table]'
            end
            formatting = formatting .. k .. ": "
        end

        if not v then
            return formatting .. '(nil)'
        elseif type(v) == "table" then
            if recurse and recurse > 0 then
                return formatting .. '\n' .. table.tostring(v, recurse-1, i+1)
            else
                return formatting .. "<table>"
            end
        elseif type(v) == "function" then
            return formatting .. tostring(v)
        elseif type(v) == "userdata" then
            return formatting .. "<userdata>"
        elseif type(v) == "boolean" then
            if v then
                return formatting .. 'true'
            else
                return formatting .. 'false'
            end
        else
            return formatting .. v
        end
    end

    if type(tab) == "table" then
        local first = true

        -- add the meta table.
        local mt = getmetatable(tab)
        if mt then
            res = res .. format_value('__mt', mt, indent)
            first = false
        end

        -- add all values.
        for k, v in pairs(tab) do
            if not first then
                res = res .. '\n'
            end

            res = res .. format_value(k, v, indent)
            first = false
        end
    else
        res = res .. format_value(nil, tab, indent)
    end

    return res
end

--
-- Returns a copy of a list with all duplicate elements removed.
--
function table.unique(tab)
    local elems = { }
    local result = { }
    table.foreachi(tab, function(elem)
        if not elems[elem] then
            table.insert(result, elem)
            elems[elem] = true
        end
    end)

    return result
end

--
-- Filters a table for empty entries. Primarily useful for lists of strings.
--
function table.filterempty(dirs)
    return table.translate(dirs, function(val)
        if val and #val > 0 then
            return val
        else
            return nil
        end
    end)
end

--
-- Compares two tables.
--
function table.equals(a, b)
    for k, v in pairs(a) do
        if b[k] ~= v then
            return false
        end
    end
    for k, v in pairs(b) do
        if a[k] ~= v then
            return false
        end
    end
    return true
end

--
-- Enumerate a table sorted by its keys.
--
function spairs(t)
    -- collect the keys
    local keys = {}
    for k in pairs(t) do
        table.insert(keys, k)
    end
    table.sort(keys)

    -- return the iterator function
    local i = 0
    return function()
        i = i + 1
        if keys[i] then
            return keys[i], t[keys[i]]
        end
    end
end
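-- Usage sketch (illustrative only, not part of the vendored file):
--   for k, v in spairs({ b = 2, a = 1 }) do
--       print(k, v)  -- prints "a 1" then "b 2"; keys are visited in sorted order
--   end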

--
-- Intersect two arrays and return a new array
--
function table.intersect(a, b)
    local result = {}
    for _, v in ipairs(b) do
        if table.indexof(a, v) then
            table.insert(result, v)
        end
    end
    return result
end

--
-- The difference of A and B is the set containing those elements that are in A but not in B
--
function table.difference(a, b)
    local result = {}
    for _, v in ipairs(a) do
        if not table.indexof(b, v) then
            table.insert(result, v)
        end
    end
    return result
end
45
Src/external_dependencies/openmpt-trunk/include/premake/src/base/term.lua
vendored
Normal file
45
Src/external_dependencies/openmpt-trunk/include/premake/src/base/term.lua
vendored
Normal file
|
@ -0,0 +1,45 @@
|
|||
--
-- term.lua
-- Additions to the 'term' namespace.
-- Copyright (c) 2017 Blizzard Entertainment and the Premake project
--

-- default colors.
term.black = 0
term.blue = 1
term.green = 2
term.cyan = 3
term.red = 4
term.purple = 5
term.brown = 6
term.lightGray = 7
term.gray = 8
term.lightBlue = 9
term.lightGreen = 10
term.lightCyan = 11
term.lightRed = 12
term.magenta = 13
term.yellow = 14
term.white = 15

-- colors for specific purpose.
term.warningColor = term.magenta
term.errorColor = term.lightRed
term.infoColor = term.lightCyan

-- color stack implementation.
term._colorStack = {}

function term.pushColor(color)
    local old = term.getTextColor()
    table.insert(term._colorStack, old)

    term.setTextColor(color)
end

function term.popColor()
    if #term._colorStack > 0 then
        local color = table.remove(term._colorStack)
        term.setTextColor(color)
    end
end
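-- Usage sketch (illustrative only, not part of the vendored file):
--   term.pushColor(term.warningColor)
--   print("printed in the warning color")
--   term.popColor()  -- restores whichever color was active before the push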
61
Src/external_dependencies/openmpt-trunk/include/premake/src/base/tools.lua
vendored
Normal file
61
Src/external_dependencies/openmpt-trunk/include/premake/src/base/tools.lua
vendored
Normal file
|
@ -0,0 +1,61 @@
|
|||
---
-- tools.lua
-- Work with Premake's collection of tool adapters.
-- Author Jason Perkins
-- Copyright (c) 2015 Jason Perkins and the Premake project
---

local p = premake
p.tools = {}

---
-- Given a toolset identifier (e.g. "gcc" or "gcc-4.8") returns the
-- corresponding tool adapter and the version, if one was provided.
--
-- @param identifier
--    A toolset identifier composed of two parts: the toolset name,
--    which should match the name of the adapter object ("gcc",
--    "clang", etc.) in the p.tools namespace, and an optional
--    version number, separated by a dash.
--
--    To make things more intuitive for Visual Studio users, supports
--    identifiers like "v100" to represent the v100 Microsoft platform
--    toolset.
-- @return
--    If successful, returns the toolset adapter object. If a version
--    was specified as part of the identifier, that is returned as a
--    second return value. If no corresponding tool adapter exists,
--    returns nil.
---

function p.tools.normalize(identifier)
    if identifier:startswith("v") then -- TODO: this should be deprecated?
        identifier = 'msc-' .. identifier
    end

    local parts = identifier:explode("-", true, 1)
    if parts[2] == nil then
        return parts[1]
    end

    -- 'msc-100' is accepted, but the code expects 'v100'
    if parts[1] == "msc" and tonumber(parts[2]:sub(1,3)) ~= nil then
        parts[2] = "v" .. parts[2]
    end

    -- perform case-correction of the LLVM toolset
    if parts[2]:startswith("llvm-vs") then
        parts[2] = "LLVM-" .. parts[2]:sub(6)
    end

    return parts[1] .. '-' .. parts[2]
end

function p.tools.canonical(identifier)
    identifier = p.tools.normalize(identifier)

    local parts = identifier:explode("-", true, 1)
    return p.tools[parts[1]], parts[2]
end
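-- Usage sketch (illustrative only, not part of the vendored file); assumes the
-- gcc and msc adapters have been registered in p.tools:
--   local toolset, version = p.tools.canonical("gcc-4.8")  --> p.tools.gcc, "4.8"
--   local msc, v = p.tools.canonical("v100")               --> p.tools.msc, "v100"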
329
Src/external_dependencies/openmpt-trunk/include/premake/src/base/tree.lua
vendored
Normal file
329
Src/external_dependencies/openmpt-trunk/include/premake/src/base/tree.lua
vendored
Normal file
|
@ -0,0 +1,329 @@
|
|||
--
-- tree.lua
-- Functions for working with the source code tree.
-- Copyright (c) 2009-2013 Jason Perkins and the Premake project
--

local p = premake
p.tree = {}
local tree = p.tree

--
-- Create a new tree.
--
-- @param n
--    The name of the tree, applied to the root node (optional).
--

function tree.new(n)
    local t = {
        name = n,
        children = {}
    }
    return t
end

--
-- Adds a new node to the tree, or returns the existing node if it is already present.
--
-- @param tr
--    The tree to contain the new node.
-- @param p
--    The path of the new node.
-- @param extraFields
--    A table containing key-value pairs to be added to any new nodes.
-- @returns
--    The new tree node.
--

function tree.add(tr, p, extraFields)
    -- Special case "." refers to the current node
    if p == "." or p == "/" then
        return tr
    end

    -- Look for the immediate parent for this new node, creating it if necessary.
    -- Recurses to create as much of the tree as necessary.
    local parentnode = tree.add(tr, path.getdirectory(p), extraFields)

    -- Create the child if necessary
    local childname = path.getname(p)
    local childnode = parentnode.children[childname]
    if not childnode or childnode.path ~= p then
        childnode = tree.insert(parentnode, tree.new(childname))
        childnode.path = p
        if extraFields then
            for k,v in pairs(extraFields) do
                childnode[k] = v
            end
        end
    end

    return childnode
end
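-- Usage sketch (illustrative only, not part of the vendored file); assumes the
-- premake path module is loaded, as it is during a normal run:
--   local tr = tree.new("MyTree")
--   tree.add(tr, "src/core/main.cpp")  -- creates "src", "src/core", and the leaf
--   tree.add(tr, "src/core/util.cpp")  -- reuses the existing "src/core" branch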

--
-- Insert one tree into another.
--
-- @param parent
--    The parent tree, to contain the child.
-- @param child
--    The child tree, to be inserted.
--

function tree.insert(parent, child)
    table.insert(parent.children, child)
    if child.name then
        parent.children[child.name] = child
    end
    child.parent = parent
    return child
end

--
-- Gets the node's relative path from its parent. If the parent does not have
-- a path set (it is the root or other container node) returns the full node path.
--
-- @param node
--    The node to query.
--

function tree.getlocalpath(node)
    if node.parent.path then
        return node.name
    elseif node.cfg then
        return node.cfg.name
    else
        return node.path
    end
end

--
-- Determines if the tree contains any branch nodes, or only leaves.
--
-- @param tr
--    The root node of the tree to query.
-- @return
--    True if a node below the root contains children, false otherwise.
--

function tree.hasbranches(tr)
    local n = #tr.children
    if n > 0 then
        for i = 1, n do
            if #tr.children[i].children > 0 then
                return true
            end
        end
    end
    return false
end

--
-- Determines if one node is a parent of another.
--
-- @param n
--    The node being tested for parentage.
-- @param child
--    The child node being tested against.
-- @return
--    True if n is a parent of child.
--

function tree.isparent(n, child)
    local p = child.parent
    while p do
        if p == n then
            return true
        end
        p = p.parent
    end
    return false
end

--
-- Remove a node from a tree.
--
-- @param node
--    The node to remove.
--

function tree.remove(node)
    local children = node.parent.children
    for i = 1, #children do
        if children[i] == node then
            table.remove(children, i)
        end
    end
    node.children = {}
end

--
-- Sort the nodes of a tree in-place.
--
-- @param tr
--    The tree to sort.
-- @param fn
--    An optional comparator function.
--

function tree.sort(tr, fn)
    if not fn then
        fn = function(a,b) return a.name < b.name end
    end
    tree.traverse(tr, {
        onnode = function(node)
            table.sort(node.children, fn)
        end
    }, true)
end

--
-- Traverse a tree.
--
-- @param t
--    The tree to traverse.
-- @param fn
--    A collection of callback functions, which may contain any or all of the
--    following entries. Entries are called in this order.
--
--    onnode        - called on each node encountered
--    onbranchenter - called on branches, before processing children
--    onbranch      - called only on branch nodes
--    onleaf        - called only on leaf nodes
--    onbranchexit  - called on branches, after processing children
--
--    Callbacks receive two arguments: the node being processed, and the
--    current traversal depth.
--
-- @param includeroot
--    True to include the root node in the traversal, otherwise it will be skipped.
-- @param initialdepth
--    An optional starting value for the traversal depth; defaults to zero.
--

function tree.traverse(t, fn, includeroot, initialdepth)

    -- forward declare my handlers, which call each other
    local donode, dochildren

    -- process an individual node
    donode = function(node, fn, depth)
        if node.isremoved then
            return
        end

        if fn.onnode then
            fn.onnode(node, depth)
        end

        if #node.children > 0 then
            if fn.onbranchenter then
                fn.onbranchenter(node, depth)
            end
            if fn.onbranch then
                fn.onbranch(node, depth)
            end
            dochildren(node, fn, depth + 1)
            if fn.onbranchexit then
                fn.onbranchexit(node, depth)
            end
        else
            if fn.onleaf then
                fn.onleaf(node, depth)
            end
        end
    end

    -- this goofy iterator allows nodes to be removed during the traversal
    dochildren = function(parent, fn, depth)
        local i = 1
        while i <= #parent.children do
            local node = parent.children[i]
            donode(node, fn, depth)
            if node == parent.children[i] then
                i = i + 1
            end
        end
    end

    -- set a default initial traversal depth, if one wasn't set
    if not initialdepth then
        initialdepth = 0
    end

    if includeroot then
        donode(t, fn, initialdepth)
    else
        dochildren(t, fn, initialdepth)
    end
end
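-- Usage sketch (illustrative only, not part of the vendored file): print an
-- indented listing of a tree such as the one built in the tree.add example above.
--   tree.traverse(tr, {
--       onbranch = function(node, depth) print(string.rep("  ", depth) .. node.name .. "/") end,
--       onleaf   = function(node, depth) print(string.rep("  ", depth) .. node.name) end
--   }, false)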

--
-- Starting at the top of the tree, remove nodes that contain only a single
-- item until I hit a node that has multiple items. This is used to remove
-- superfluous folders from the top of the source tree.
--

function tree.trimroot(tr)
    local trimmed

    -- start by removing single-children folders from the top of the tree
    while #tr.children == 1 do
        local node = tr.children[1]

        -- if this node has no children (it is the last node in the tree) I'm done
        if #node.children == 0 or node.trim == false then
            break
        end

        -- remove this node from the tree, and move its children up a level
        trimmed = true
        local numChildren = #node.children
        for i = 1, numChildren do
            local child = node.children[i]
            child.parent = node.parent
            tr.children[i] = child
        end
    end

    -- found the top, now remove any single-children ".." folders from here
    local dotdot
    local count = #tr.children
    repeat
        dotdot = false
        for i = 1, count do
            local node = tr.children[i]
            if node.name == ".." and #node.children == 1 then
                local child = node.children[1]
                child.parent = node.parent
                tr.children[i] = child
                trimmed = true
                dotdot = true
            end
        end
    until not dotdot

    -- if nodes were removed, adjust the paths on all remaining nodes
    if trimmed then
        tree.traverse(tr, {
            onnode = function(node)
                if node.parent.path then
                    node.path = path.join(node.parent.path, node.name)
                else
                    node.path = node.name
                end
            end
        }, false)
    end
end
239
Src/external_dependencies/openmpt-trunk/include/premake/src/base/validation.lua
vendored
Normal file
239
Src/external_dependencies/openmpt-trunk/include/premake/src/base/validation.lua
vendored
Normal file
|
@ -0,0 +1,239 @@
|
|||
---
-- base/validation.lua
--
-- Verify the contents of the project object before handing them off to
-- the action/exporter.
--
-- Copyright (c) 2002-2015 Jason Perkins and the Premake project
---

local p = premake
p.validation = {}
local m = p.validation

m.elements = {}

---
-- Validate the global container and all of its contents.
---

m.elements.global = function(glb)
    return {
    }
end

function p.global.validate(self)
    p.callArray(m.elements.global, self)
    p.container.validateChildren(self)
end

---
-- Validate a workspace and its projects.
---

m.elements.workspace = function(wks)
    return {
        m.workspaceHasConfigs,
        m.uniqueProjectIds,
    }
end

function p.workspace.validate(self)
    p.callArray(m.elements.workspace, self)
    p.container.validateChildren(self)
end

---
-- Validate a project and its configurations.
---

m.elements.project = function(prj)
    return {
        m.projectHasLanguage,
        m.actionSupportsLanguage,
        m.actionSupportsKind,
        m.projectRulesExist,
        m.projectValuesInScope,
    }
end

function p.project.validate(self)
    p.callArray(m.elements.project, self)
    for cfg in p.project.eachconfig(self) do
        p.config.validate(cfg)
    end
end
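-- Usage sketch (illustrative only, not part of the vendored file): each
-- m.elements.* function simply returns the list of check functions that
-- p.callArray runs in order, so a module can append its own project-level
-- check by overriding the list builder. The check name below is hypothetical:
--   p.override(m.elements, "project", function(base, prj)
--       local checks = base(prj)
--       table.insert(checks, myModule.projectHasIcon)
--       return checks
--   end)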

---
-- Validate a project configuration.
---

m.elements.config = function(cfg)
    return {
        m.configHasKind,
        m.configSupportsKind,
        m.configValuesInScope,
    }
end

function p.config.validate(self)
    p.callArray(m.elements.config, self)
end

---
-- Validate a rule.
---

m.elements.rule = function(rule)
    return {
        -- TODO: fill this in
    }
end

function p.rule.validate(self)
    p.callArray(m.elements.rule, self)
end

---
-- Check the values stored in a configuration for values that might have
-- been set out of scope.
--
-- @param container
--    The container being validated; will only check fields which are
--    scoped to this container's class hierarchy.
-- @param expected
--    The expected scope of values in this object, i.e. "project", "config".
--    Values that appear unexpectedly get checked to be sure they match up
--    with the values in the expected scope, and an error is raised if they
--    are not the same.
---

function p.config.validateScopes(self, container, expected)
    for f in p.field.each() do
        -- Is this field scoped to the target container class? If not,
        -- I can skip over it (config scope applies to everything).
        local scope
        for i = 1, #f.scopes do
            if f.scopes[i] == "config" or p.container.classIsA(container.class, f.scopes[i]) then
                scope = f.scopes[i]
                break
            end
        end

        local okay = (not scope or scope == "config")

        -- Skip over fields that are at or below my expected scope.
        okay = okay or scope == expected

        -- Skip over fields that bubble up to their parent containers anyway;
        -- these can't be out of scope for that reason
        okay = okay or p.oven.bubbledFields[f.name]

        -- this one needs to be checked
        okay = okay or p.field.compare(f, self[scope][f.name], self[f.name])

        -- found a problem?
        if not okay then
            local key = "validate." .. f.name
            p.warnOnce(key, "'%s' on %s '%s' differs from %s '%s'; may be set out of scope", f.name, expected, self.name, scope, self[scope].name)
        end
    end
end

---------------------------------------------------------------------------
--
-- Handlers for individual checks
--
---------------------------------------------------------------------------

function m.actionSupportsKind(prj)
    if not p.action.supports(prj.kind) then
        p.warn("Unsupported kind '%s' used for project '%s'", prj.kind, prj.name)
    end
end

function m.actionSupportsLanguage(prj)
    if not p.action.supports(prj.language) then
        p.warn("Unsupported language '%s' used for project '%s'", prj.language, prj.name)
    end
end

function m.configHasKind(cfg)
    if not cfg.kind then
        p.error("Project '%s' needs a kind in configuration '%s'", cfg.project.name, cfg.name)
    end
end

function m.configSupportsKind(cfg)
    if not p.action.supports(cfg.kind) then
        p.warn("Unsupported kind '%s' used in configuration '%s'", cfg.kind, cfg.name)
    end

    -- makefile configuration can only appear in C++ projects; this is the
    -- default now, so should only be a problem if overridden.
    if (cfg.kind == p.MAKEFILE or cfg.kind == p.NONE) and p.project.isdotnet(cfg.project) then
        p.error("Project '%s' uses '%s' kind in configuration '%s'; language must not be C#", cfg.project.name, cfg.kind, cfg.name)
    end
end

function m.configValuesInScope(cfg)
    p.config.validateScopes(cfg, cfg.project, "config")
end

function m.projectHasLanguage(prj)
    if not prj.language then
        p.error("project '%s' does not have a language", prj.name)
    end
end

function m.projectRulesExist(prj)
    for i = 1, #prj.rules do
        local rule = prj.rules[i]
        if not p.global.getRule(rule) then
            p.error("project '%s' uses missing rule '%s'", prj.name, rule)
        end
    end
end

function m.projectValuesInScope(prj)
    p.config.validateScopes(prj, prj, "project")
end

function m.uniqueProjectIds(wks)
    local uuids = {}
    for prj in p.workspace.eachproject(wks) do
        if uuids[prj.uuid] then
            p.error("projects '%s' and '%s' have the same UUID", uuids[prj.uuid], prj.name)
        end
        uuids[prj.uuid] = prj.name
    end
end

function m.workspaceHasConfigs(wks)
    if not wks.configurations or #wks.configurations == 0 then
        p.error("workspace '%s' does not contain any configurations", wks.name)
    end
end
202
Src/external_dependencies/openmpt-trunk/include/premake/src/base/workspace.lua
vendored
Normal file
202
Src/external_dependencies/openmpt-trunk/include/premake/src/base/workspace.lua
vendored
Normal file
|
@ -0,0 +1,202 @@
|
|||
---
-- workspace.lua
-- Work with the list of workspaces loaded from the script.
-- Copyright (c) 2002-2015 Jason Perkins and the Premake project
---

local p = premake
p.workspace = p.api.container("workspace", p.global)

local workspace = p.workspace

---
-- Switch this container's name from "solution" to "workspace"
--
-- We changed these names on 30 Jul 2015. While it might be nice to leave
-- `solution()` around for Visual Studio folks and everyone still used to the
-- old system, it would be good to eventually deprecate and remove all of
-- the other, more internal uses of "solution" and "sln". Probably including
-- all uses of container class aliases, since we probably aren't going to
-- need those again (right?).
---

p.solution = workspace
workspace.alias = "solution"

p.alias(_G, "workspace", "solution")
p.alias(_G, "externalworkspace", "externalsolution")

---
-- Create a new workspace container instance.
---

function workspace.new(name)
    local wks = p.container.new(workspace, name)
    return wks
end

--
-- Iterate over the configurations of a workspace.
--
-- @return
--    A configuration iteration function.
--

function workspace.eachconfig(self)
    self = p.oven.bakeWorkspace(self)

    local i = 0
    return function()
        i = i + 1
        if i > #self.configs then
            return nil
        else
            return self.configs[i]
        end
    end
end

--
-- Iterate over the projects of a workspace.
--
-- @return
--    An iterator function, returning project configurations.
--

function workspace.eachproject(self)
    local i = 0
    return function ()
        i = i + 1
        if i <= #self.projects then
            return p.workspace.getproject(self, i)
        end
    end
end
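-- Usage sketch (illustrative only, not part of the vendored file); assumes `wks`
-- is a workspace object already produced by the scripting API:
--   for prj in p.workspace.eachproject(wks) do
--       print(prj.name)
--   end
--   for cfg in p.workspace.eachconfig(wks) do
--       print(cfg.name)
--   end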

--
-- Locate a project by name, case insensitive.
--
-- @param name
--    The name of the project to find.
-- @return
--    The project object, or nil if a matching project could not be found.
--

function workspace.findproject(self, name)
    name = name:lower()
    for _, prj in ipairs(self.projects) do
        if name == prj.name:lower() then
            return prj
        end
    end
    return nil
end

--
-- Retrieve the tree of project groups.
--
-- @return
--    The tree of project groups defined for the workspace.
--

function workspace.grouptree(self)
    -- check for a previously cached tree
    if self.grouptree then
        return self.grouptree
    end

    -- build the tree of groups
    local tr = p.tree.new()
    for prj in workspace.eachproject(self) do
        local prjpath = path.join(prj.group, prj.name)
        local node = p.tree.add(tr, prjpath)
        node.project = prj
    end

    -- assign UUIDs to each node in the tree
    p.tree.traverse(tr, {
        onbranch = function(node)
            node.uuid = os.uuid("group:" .. node.path)
        end
    })

    -- sort by name for determinism.
    p.tree.sort(tr, function(a,b)
        return a.name < b.name
    end)

    self.grouptree = tr
    return tr
end

--
-- Retrieve the project configuration at a particular index.
--
-- @param idx
--    An index into the array of projects.
-- @return
--    The project configuration at the given index.
--

function workspace.getproject(self, idx)
    self = p.oven.bakeWorkspace(self)
    return self.projects[idx]
end

---
-- Determines if the workspace contains a project that meets certain criteria.
--
-- @param func
--    A test function. Receives a project as its only argument and returns a
--    boolean indicating whether it meets the matching criteria.
-- @return
--    True if the test function returned true.
---

function workspace.hasProject(self, func)
    return p.container.hasChild(self, p.project, func)
end

--
-- Return the relative path from the workspace to the specified file.
--
-- @param self
--    The workspace object to query.
-- @param filename
--    The file path, or an array of file paths, to convert.
-- @return
--    The relative path, or array of paths, from the workspace to the file.
--

function workspace.getrelative(self, filename)
    if type(filename) == "table" then
        local result = {}
        for i, name in ipairs(filename) do
            if name and #name > 0 then
                table.insert(result, workspace.getrelative(self, name))
            end
        end
        return result
    else
        if filename then
            local result = filename
            if path.hasdeferredjoin(result) then
                result = path.resolvedeferredjoin(result)
            end
            return path.getrelative(self.location, result)
        end
    end
end