Reworked priorities

Ryan Ward 2022-01-23 17:23:32 -05:00
parent e194a06427
commit 3fcba8825b
4 changed files with 70 additions and 47 deletions


@@ -28,6 +28,11 @@ Added:
 Changed:
 ---
+- multi:mainloop(~~settings~~)/multi:uManager(~~settings~~) no longer take a settings argument; settings are now passed to multi:init(settings) (see the sketch below)
+| Setting | Description |
+|---|---|
+| print | When set to true, parts of the library will print status updates; otherwise no internal printing is done |
+| priority | When set to true, the library will prioritize objects based on their priority |
 - `multi:newProcessor(name,nothread)` The new `nothread` argument tells the system you will not be using the Start() and Stop() functions; instead you handle the process yourself through `proc.run()`, which must be called to pump the events.
 - Processors now also use lManager instead of uManager.
 - `multi.hold(n,opt)` now supports an option table like thread.hold does.
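
The init and processor changes above boil down to the following minimal Lua sketch. It is hedged: only `multi:init(settings)`, the `print`/`priority` settings, the `nothread` argument, and `proc.run()` come from the entries above; the `newLoop` body and the host loop are illustrative.

```lua
-- Minimal sketch of the new settings flow (assumptions noted above).
-- Settings now go to multi:init(...) instead of mainloop/uManager.
local multi, thread = require("multi"):init{
	print = true,    -- let the library print internal status updates
	priority = true, -- schedule objects based on their priority
}

-- With nothread = true you skip Start()/Stop() and pump events yourself.
local proc = multi:newProcessor("worker", true)
proc:newLoop(function()
	-- work scheduled on this processor (illustrative)
end)

-- Host loop: proc.run() must be called to pump the processor's events.
while true do
	proc.run()
end
```
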
@@ -78,6 +83,10 @@ Changed:
 Removed:
 ---
+- `multi:getError()` Removed along with the `protect` setting
+- `multi:FreeMainEvent()` The new connection changes make this function unnecessary
+- `multi:OnMainConnect(func)` See above
+- `multi:connectFinal(func)` See above
 - `multi:lightloop()` The cleaned-up mainloop/uManager method is now faster than lightloop (which should have been called liteloop)
 - `multi:threadloop()` See above for reasons
 - `multi setting: protect` This added extra complexity to the mainloop for little benefit. If you expect a function to error, use pcall yourself. Removing it saves a decent number of cycles, about a 6.25% increase in performance.
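
With the `protect` setting and `multi:getError()` gone, error handling is left to the caller. Below is a minimal sketch of the do-it-yourself pcall approach suggested above; `riskyWork` and the loop body are placeholders, not part of the library.

```lua
-- Without the removed `protect` setting, wrap risky callbacks in pcall yourself.
local function riskyWork()
	error("something went wrong") -- placeholder failure
end

multi:newLoop(function()
	local ok, err = pcall(riskyWork)
	if not ok then
		multi.print("task failed: " .. tostring(err)) -- multi.print is the library's print helper (assumed to respect the print setting)
	end
end)
```
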


@@ -149,16 +149,16 @@ function multi:getTasksDetails(t)
 end
 dat = multi.AlignTable(proc_tab).. "\n"
 dat = dat .. "\n" .. multi.AlignTable(th_tab)
-return "Load on "..ProcessName[(self.Type=="process" and 1 or 2)].."<"..(self.Name or "Unnamed")..">"..": "..multi.Round(load,2).."%\nCycles Per Second Per Task: "..steps.."\nMemory Usage: "..math.ceil(collectgarbage("count")).." KB\nProcesses Running: "..process_count.."\nThreads Running: "..thread_count.."\nSystemThreads Running: "..#(multi.SystemThreads or {}).."\nPriority Scheme: "..priorityTable[multi.settings.priority or 0].."\n\n"..dat..dat2.."\n\n"..s
+return "Load on "..ProcessName[(self.Type=="process" and 1 or 2)].."<"..(self.Name or "Unnamed")..">"..": "..multi.Round(load,2).."%\nCycles Per Second Per Task: "..steps.."\nMemory Usage: "..math.ceil(collectgarbage("count")).." KB\nProcesses Running: "..process_count.."\nThreads Running: "..thread_count.."\nSystemThreads Running: "..#(multi.SystemThreads or {}).."\nPriority Scheme: "..priorityTable[multi.defaultSettings.priority or 0].."\n\n"..dat..dat2.."\n\n"..s
 else
-return "Load on "..ProcessName[(self.Type=="process" and 1 or 2)].."<"..(self.Name or "Unnamed")..">"..": "..multi.Round(load,2).."%\nCycles Per Second Per Task: "..steps.."\n\nMemory Usage: "..math.ceil(collectgarbage("count")).." KB\nProcesses Running: "..process_count.."\nThreads Running: 0\nPriority Scheme: "..priorityTable[multi.settings.priority or 0].."\n\n"..dat2.."\n\n"..s
+return "Load on "..ProcessName[(self.Type=="process" and 1 or 2)].."<"..(self.Name or "Unnamed")..">"..": "..multi.Round(load,2).."%\nCycles Per Second Per Task: "..steps.."\n\nMemory Usage: "..math.ceil(collectgarbage("count")).." KB\nProcesses Running: "..process_count.."\nThreads Running: 0\nPriority Scheme: "..priorityTable[multi.defaultSettings.priority or 0].."\n\n"..dat2.."\n\n"..s
 end
 else
 local load,steps = self:getLoad()
 str = {
 ProcessName = (self.Name or "Unnamed"),
 MemoryUsage = math.ceil(collectgarbage("count")),
-PriorityScheme = priorityTable[multi.settings.priority or 0],
+PriorityScheme = priorityTable[multi.defaultSettings.priority or 0],
 SystemLoad = multi.Round(load,2),
 CyclesPerSecondPerTask = steps,
 SystemThreadCount = multi.SystemThreads and #multi.SystemThreads or 0
@@ -258,7 +258,7 @@ function multi:newConnection(protect,func,kill)
 end
 end)
 repeat
-self.Parent:uManager(multi.settings)
+self.Parent:uManager()
 until self.waiting==false
 id:Destroy()
 return self
@@ -654,6 +654,7 @@ function multi:newEvent(task)
 multi:create(c)
 return c
 end
+
 function multi:newUpdater(skip)
 local c=self:newBase()
 c.Type='updater'
@@ -674,6 +675,7 @@ function multi:newUpdater(skip)
 multi:create(c)
 return c
 end
+
 function multi:newAlarm(set)
 local c=self:newBase()
 c.Type='alarm'
@@ -709,6 +711,7 @@ function multi:newAlarm(set)
 multi:create(c)
 return c
 end
+
 function multi:newLoop(func)
 local c=self:newBase()
 c.Type='loop'
@@ -786,6 +789,7 @@ function multi:newStep(start,reset,count,skip)
 multi:create(c)
 return c
 end
+
 function multi:newTLoop(func,set)
 local c=self:newBase()
 c.Type='tloop'
@@ -820,9 +824,11 @@ function multi:newTLoop(func,set)
 multi:create(c)
 return c
 end
+
 function multi:setTimeout(func,t)
 multi:newThread(function() thread.sleep(t) func() end)
 end
+
 function multi:newTStep(start,reset,count,set)
 local c=self:newStep(start,reset,count)
 c.Type='tstep'
@@ -903,6 +909,7 @@ local __CurrentTask
 function multi.getCurrentProcess()
 return __CurrentProcess
 end
+
 function multi.getCurrentTask()
 return __CurrentTask
 end
@@ -963,9 +970,11 @@ multi.GlobalVariables={}
 local dFunc = function() return true end
 local dRef = {nil,nil,nil,nil,nil}
 thread.requests = {}
+
 function thread.request(t,cmd,...)
 thread.requests[t.thread] = {cmd,{...}}
 end
+
 function thread.getRunningThread()
 local threads = globalThreads
 local t = coroutine.running()
@@ -977,6 +986,7 @@ function thread.getRunningThread()
 end
 end
 end
+
 function thread._Requests()
 local t = thread.requests[coroutine.running()]
 if t then
@@ -985,6 +995,7 @@ function thread._Requests()
 thread[cmd](unpack(args))
 end
 end
+
 function thread.sleep(n)
 thread._Requests()
 thread.getRunningThread().lastSleep = clock()
@@ -1026,6 +1037,7 @@ function thread.hold(n,opt)
 return coroutine.yield(dRef)
 end
 end
+
 function thread.holdFor(sec,n)
 thread._Requests()
 dRef[1] = "_holdF_"
@@ -1033,6 +1045,7 @@ function thread.holdFor(sec,n)
 dRef[3] = n or dFunc
 return coroutine.yield(dRef)
 end
+
 function thread.holdWithin(skip,n)
 thread._Requests()
 dRef[1] = "_holdW_"
@@ -1040,21 +1053,25 @@ function thread.holdWithin(skip,n)
 dRef[3] = n or dFunc
 return coroutine.yield(dRef)
 end
+
 function thread.skip(n)
 thread._Requests()
 dRef[1] = "_skip_"
 dRef[2] = n or 1
 return coroutine.yield(dRef)
 end
+
 function thread.kill()
 dRef[1] = "_kill_"
 dRef[2] = "T_T"
 return coroutine.yield(dRef)
 end
+
 function thread.yield()
 thread._Requests()
 return thread.sleep(0)
 end
+
 function thread.isThread()
 if _VERSION~="Lua 5.1" then
 local a,b = coroutine.running()
@@ -1063,20 +1080,25 @@ function thread.isThread()
 return coroutine.running()~=nil
 end
 end
+
 function thread.getCores()
 return thread.__CORES
 end
+
 function thread.set(name,val)
 multi.GlobalVariables[name]=val
 return true
 end
+
 function thread.get(name)
 return multi.GlobalVariables[name]
 end
+
 function thread.waitFor(name)
 thread.hold(function() return thread.get(name)~=nil end)
 return thread.get(name)
 end
+
 function multi.hold(func,opt)
 if thread.isThread() then
 if type(func) == "function" or type(func) == "table" then
@@ -1209,6 +1231,7 @@ function thread:newFunctionBase(generator,holdme)
 return tfunc
 end
 end
+
 function thread:newFunction(func,holdme)
 return thread:newFunctionBase(function(...)
 return multi.getCurrentProcess():newThread("TempThread",func,...)
@@ -1225,6 +1248,7 @@ end
 function multi:attachScheduler()
 local threads = {}
 self.threadsRef = threads
+
 function self:newThread(name,func,...)
 self.OnLoad:Fire()
 local func = func or name
@@ -1248,9 +1272,11 @@ function multi:attachScheduler()
 c.isError = false
 c.OnError = self:newConnection(true,nil,true)
 c.OnDeath = self:newConnection(true,nil,true)
+
 function c:isPaused()
 return self._isPaused
 end
+
 local resumed = false
 function c:Pause()
 if not self._isPaused then
@@ -1265,22 +1291,27 @@ function multi:attachScheduler()
 end
 return self
 end
+
 function c:Resume()
 resumed = true
 return self
 end
+
 function c:Kill()
 thread.request(self,"kill")
 return self
 end
 c.Destroy = c.Kill
 c.kill = c.Kill
+
 function c.ref:send(name,val)
 ret=coroutine.yield({Name=name,Value=val})
 end
+
 function c.ref:get(name)
 return self.Globals[name]
 end
+
 function c.ref:kill()
 dRef[1] = "_kill_"
 dRef[2] = "I Was killed by You!"
@@ -1289,6 +1320,7 @@ function multi:attachScheduler()
 error("Failed to kill a thread! Exiting...")
 end
 end
+
 function c.ref:sleep(n)
 if type(n)=="function" then
 ret=thread.hold(n)
@@ -1298,9 +1330,11 @@ function multi:attachScheduler()
 error("Invalid Type for sleep!")
 end
 end
+
 function c.ref:syncGlobals(v)
 self.Globals=v
 end
+
 table.insert(threads,c)
 globalThreads[c] = self
 if initT==false then
@@ -1311,6 +1345,7 @@ function multi:attachScheduler()
 multi:create(c)
 return c
 end
+
 function self:newISOThread(name,func,_env,...)
 self.OnLoad:Fire()
 local func = func or name
@@ -1327,6 +1362,7 @@ function multi:attachScheduler()
 local func = isolateFunction(func,env)
 return self:newThread(name,func)
 end
+
 function self.initThreads(justThreads)
 initT = true
 self.scheduler=self:newLoop():setName("multi.thread")
@@ -1524,6 +1560,7 @@ function multi:attachScheduler()
 end
 end
 end
+
 function multi:newService(func) -- Priority managed threads
 local c = {}
 c.Type = "service"
@@ -1617,6 +1654,7 @@ function multi:newService(func) -- Priority managed threads
 multi.create(multi,c)
 return c
 end
+
 -- Multi runners
 local function mainloop(self)
 __CurrentProcess = self
@@ -1640,6 +1678,7 @@ local function mainloop(self)
 return nil, "Already Running!"
 end
 end
+multi.mainloop = mainloop
 local function p_mainloop(self)
@@ -1668,6 +1707,7 @@ local function p_mainloop(self)
 return nil, "Already Running!"
 end
 end
+
 local init = false
 function multi.init(settings, realsettings)
 if settings == multi then settings = realsettings end
@@ -1684,7 +1724,7 @@ function multi.init(settings, realsettings)
 return _G["$multi"].multi,_G["$multi"].thread
 end
-function multi:uManager(settings)
+function multi:uManager()
 if self.Active then
 __CurrentProcess = self
 multi.OnPreLoad:Fire()
@@ -1739,9 +1779,11 @@ function table.merge(t1, t2)
 end
 return t1
 end
+
 if table.unpack and not unpack then
 unpack=table.unpack
 end
+
 multi.DestroyedObj = {
 Type = "destroyed",
 }
@@ -1899,12 +1941,6 @@ function multi:canSystemThread()
 return false
 end
-function multi:getError()
-if self.error then
-return self.error
-end
-end
 function multi:benchMark(sec,p,pt)
 local c = 0
 local temp=self:newLoop(function(self,t)
@@ -1958,9 +1994,14 @@ function multi.AlignTable(tab)
 end
 function multi:endTask(TID)
+for i=#self.Mainloop,1,-1 do
+if self.Mainloop[i].TID == TID then
 self.Mainloop[TID]:Destroy()
 return self
 end
+end
+return self
+end
 function multi:IsAnActor()
 return self.Act~=nil
@@ -1985,30 +2026,6 @@ function multi.timer(func,...)
 return t,unpack(args)
 end
-function multi:OnMainConnect(func)
-table.insert(self.func,func)
-return self
-end
-function multi:FreeMainEvent()
-self.func={}
-return self
-end
-function multi:connectFinal(func)
-if self.Type=='event' then
-self:OnEvent(func)
-elseif self.Type=='alarm' then
-self:OnRing(func)
-elseif self.Type=='step' or self.Type=='tstep' then
-self:OnEnd(func)
-else
-multi.print("Warning!!! "..self.Type.." doesn't contain a Final Connection State! Use "..self.Type..":Break(func) to trigger it's final event!")
-self:OnBreak(func)
-end
-return self
-end
 if os.getOS()=="windows" then
 thread.__CORES=tonumber(os.getenv("NUMBER_OF_PROCESSORS"))
 else


@@ -1,5 +1,5 @@
 package.path = "./?.lua;?/init.lua;"..package.path
-local multi,thread = require("multi"):init{print=true,priority=true}
+local multi,thread = require("multi"):init{print=true}
 --local GLOBAL,THREAD = require("multi.integration.lanesManager"):init()
 -- func = THREAD:newFunction(function(a,b,c)
@@ -33,18 +33,15 @@ local c = 1
 local function bench(t,step)
 a = a + step
 c = c + 1
-if c == 5 then
---print("Total: "..a)
 os.exit()
 end
-end--p_c,p_vh,p_h,p_an,p_n,p_bn,p_l,p_vl,p_i
-multi:benchMark(sleep_for,multi.Priority_Idle,"Idle:"):OnBench(bench)
+--multi:benchMark(sleep_for,multi.Priority_Idle,"Idle:"):OnBench(bench)
 --multi:benchMark(sleep_for,multi.Priority_Very_Low,"Very Low:"):OnBench(bench)
-multi:benchMark(sleep_for,multi.Priority_Low,"Low:"):OnBench()
+--multi:benchMark(sleep_for,multi.Priority_Low,"Low:"):OnBench(bench)
 --multi:benchMark(sleep_for,multi.Priority_Below_Normal,"Below Normal:"):OnBench(bench)
-multi:benchMark(sleep_for,multi.Priority_Normal,"Normal:"):OnBench(bench)
+--multi:benchMark(sleep_for,multi.Priority_Normal,"Normal:"):OnBench(bench)
 --multi:benchMark(sleep_for,multi.Priority_Above_Normal,"Above Normal:"):OnBench(bench)
-multi:benchMark(sleep_for,multi.Priority_High,"High:"):OnBench(bench)
+--multi:benchMark(sleep_for,multi.Priority_High,"High:"):OnBench(bench)
 --multi:benchMark(sleep_for,multi.Priority_Very_High,"Very High:"):OnBench(bench)
 multi:benchMark(sleep_for,multi.Priority_Core,"Core:"):OnBench(bench)
 multi.OnExit(function()


@@ -29,5 +29,5 @@ runTest = thread:newFunction(function()
 print(multi:getTasksDetails())
 os.exit()
 end)
-runTest()
+runTest().OnError(print)
 multi:mainloop()