
Fixed Elevated Batch Run

master
pb, 2 years ago
commit d5c7e5ea20
2 files changed with 31 additions and 13 deletions
  1. index.js: 7 additions, 7 deletions
  2. win_verse.js: 24 additions, 6 deletions
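
Summary of the fix: win_verse.js gains getElevatedTaskInBatch and runElevatedInBatch, elevatedRunner takes an inBatch flag so individual batched tasks no longer write the run.done marker, and runElevatedBatch now writes run.done once after the whole batch settles; index.js switches its elevated pull tasks over to the new batched wrappers. A minimal sketch of the run.done hand-off the code relies on follows; the helper names and the polling side are assumptions for illustration, not part of this commit.

    var fs = require('fs')

    // Elevated side: run the whole batch, then signal completion exactly once.
    function runBatchThenSignal(tasks) {
        return tasks.reduce(function (chain, task) { return chain.then(task) }, Promise.resolve())
            .then(function () { fs.writeFileSync('run.done', 'success') })
            .catch(function (e) { console.error(e); fs.writeFileSync('run.done', 'failure') })
    }

    // Unelevated side (hypothetical): wait for the elevated process to drop the marker.
    function waitForRunDone(intervalMs) {
        return new Promise(function (resolve) {
            var timer = setInterval(function () {
                if (fs.existsSync('run.done')) {
                    clearInterval(timer)
                    resolve(fs.readFileSync('run.done', 'utf8'))
                }
            }, intervalMs)
        })
    }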

index.js (7 additions, 7 deletions)

@@ -9,7 +9,7 @@ var Tasq = utils.Tasq
var statuslog = utils.Traq
var Traq = utils.Traq
Tasq.addlistener(statuslog.statuslog)
-fs.writeFileSync('run.log', ', ' + JSON.stringify( { m : 'Hello'} ), { 'flag': 'a+' })
+// fs.writeFileSync('run.log', ', ' + JSON.stringify( { m : 'Hello'} ), { 'flag': 'a+' })
Tasq.addlistener((e)=>{ fs.writeFileSync('run.log', ', ' + JSON.stringify( e ), { 'flag': 'a+' }) })
var cli = require('./cliverse')
function isWin(){ return /^win/.test(process.platform) }
@@ -413,8 +413,8 @@ var getPullTask = (repodef, branch, repoowner, errHandler, elevatedBatch, regula
initTask(task)

if(repodef.requiresElevation) {
+elevatedBatch.push(shell_verse.getElevatedTaskInBatch( task ));
+if(elevatedBatch.length === 1) { initTask(elevatedBatch[0]) } // PB : TDOO -- Make sure first task also has run context. May need to be moved to win_verse
-elevatedBatch.push(shell_verse.getElevatedTask( task ));
return elevatedBatch[elevatedBatch.length-1]
}
else {
@@ -449,7 +449,7 @@ var getPullTask = (repodef, branch, repoowner, errHandler, elevatedBatch, regula
initTask(task)

if(repodef.requiresElevation) {
-elevatedBatch.push(shell_verse.getElevatedTask( task ));
+elevatedBatch.push(shell_verse.getElevatedTaskInBatch( task ));
return elevatedBatch[elevatedBatch.length-1]
}
else {
@@ -461,7 +461,7 @@ var getPullTask = (repodef, branch, repoowner, errHandler, elevatedBatch, regula
initTask(task)
if(repodef.requiresElevation) {
-elevatedBatch.push(shell_verse.getElevatedTask( task ));
+elevatedBatch.push(shell_verse.getElevatedTaskInBatch( task ));
return elevatedBatch[elevatedBatch.length-1]
}
else {
@@ -2371,8 +2371,7 @@ var elxr = {
var regularpulltasks = function(){ return Promise.resolve(true) }
if(def.elevated){
elevatedpulltasks = function() {
// shell_verse.getElevatedTask( t1 )
-elevatedpulltasks = function() {
var eBatch = []
def.elevated.map((def) => getPullTask(def, null, null, null, eBatch))
return shell_verse.runElevatedBatch(eBatch).then(() => {
@@ -2992,7 +2991,7 @@ var __default = ((name, options)=>{

// Common baseline repos for all chess instances.
, repos : [
-{ repo : 'ember-masonry-grid' /*, branch : master*/ } // Default need not be specified.
+{ repo : 'ember-masonry-grid' /*, branch : master*/ } // Default need not be specified.
, { repo : 'bbhverse' }
, { repo : 'clientverse' }
, { repo : 'serververse' }
@@ -4133,6 +4132,7 @@ function generateDependencies(){
l('<Br/>cmd = ' + processedArgs._[0])
processedArgs._[1] === 'use' ? l('<Br/>using = ' + processedArgs._[2]) : null;
l('<Br/><Br/>')
+// alert(fso.GetAbsolutePathName("."))
var timer = function(){
l('.');
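
On the index.js side, each repo definition that requires elevation now contributes a task wrapped by shell_verse.getElevatedTaskInBatch to elevatedBatch, and the elevatedpulltasks block (around line 2371) builds one eBatch from def.elevated and hands it to runElevatedBatch in a single call. A condensed sketch of that pattern, assuming index.js requires win_verse.js as shell_verse and using a placeholder task body in place of the real pull logic:

    var shell_verse = require('./win_verse')

    function getPullTaskSketch(repodef, elevatedBatch, regularBatch) {
        // Placeholder body; the real getPullTask performs the git pull for repodef.repo.
        var task = function () { return Promise.resolve({ repo : repodef.repo }) }
        task.info = { repo : repodef.repo }
        if (repodef.requiresElevation) {
            // Batched wrapper: the task itself no longer writes run.done (see win_verse.js below).
            elevatedBatch.push(shell_verse.getElevatedTaskInBatch(task))
            return elevatedBatch[elevatedBatch.length - 1]
        }
        regularBatch.push(task)
        return task
    }

    // Usage, mirroring elevatedpulltasks: collect the batch, then run it once.
    var eBatch = []
    var defs = [{ repo : 'serververse', requiresElevation : true }]
    defs.map(function (def) { return getPullTaskSketch(def, eBatch, []) })
    shell_verse.runElevatedBatch(eBatch)

The real getPullTask also takes branch, repoowner, errHandler and a regular batch; the sketch keeps only the elevation path.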

win_verse.js (24 additions, 6 deletions)

@@ -13,33 +13,38 @@ var shell_verse = {
else return shell_verse.runNonElevated( taskToRun )
}

-, elevatedRunner( taskToRun ){
+, elevatedRunner( taskToRun, inBatch ){
// PB : TODO -- Should be called only when we are in an elevated shell that was already requested from an unelevated shell with a batch of tasks.
try {
var __runasresult = null;
return taskToRun().then((r)=>{
// PB : TODO -- Every elevation should have its own messaging file. Async writes from multiple processes are a problem here...
fs.writeFileSync('run.log', ', ' + JSON.stringify( { info : taskToRun.info, success: true }), { 'flag': 'a+' })
-fs.writeFileSync('run.done', 'success') // PB : TODO -- This should be done conditionally if we are running inproc.
+if(!inBatch) fs.writeFileSync('run.done', 'success') // PB : TODO -- This should be done conditionally if we are running inproc.
return __runasresult = r;
})
.catch((e) => {
fs.writeFileSync('run.log', ', ' + JSON.stringify(e), { 'flag': 'a+' })
-fs.writeFileSync('run.done', 'failure')
+if(!inBatch)fs.writeFileSync('run.done', 'failure')
console.error(e)
})
.finally(() => {
-if(__runasresult && !__runasresult.skipped) fs.unlinkSync('run.done')
+// if(__runasresult && !__runasresult.skipped) fs.unlinkSync('run.done')
})
}
catch (e) {
console.error('Error Invalid command : ' + e)
-fs.writeFileSync('run.done', 'error')
+if(!inBatch) fs.writeFileSync('run.done', 'error')
}
finally {
}
}
, getElevatedTask : function( taskToRun ){ return ()=>{ return shell_verse.runElevated(taskToRun) }}
+, getElevatedTaskInBatch : function( taskToRun ){ return ()=>{ return shell_verse.runElevatedInBatch(taskToRun) }}
+, runElevatedInBatch : ( taskToRun ) => {
+if (__isElevated) return shell_verse.elevatedRunner(taskToRun, true)
+else return shell_verse.requestElevation(shell_verse.elevatedRunner, taskToRun)
+}
, runElevated : ( taskToRun ) => {
// Let shell_verse decide whether to Elevate Out of Proc or In Proc
@@ -63,7 +68,20 @@ var shell_verse = {
// In windows we don't need to run each task. We hand over to another shell which in elevated state rebuilds the whole batch and runs.
// Irrespective of the batch we just call runElevated once.
if (__isElevated) {
-return any(batchToRun);
+return any(batchToRun).then((r)=>{
+// PB : TODO -- Every elevation should have its own messaging file. Async writes from multiple processes are a problem here...
+// fs.writeFileSync('run.log', ', ' + JSON.stringify( { info : taskToRun.info, success: true }), { 'flag': 'a+' })
+fs.writeFileSync('run.done', 'success') // PB : TODO -- This should be done conditionally if we are running inproc.
+return __runasresult = r;
+})
+.catch((e) => {
+// fs.writeFileSync('run.log', ', ' + JSON.stringify(e), { 'flag': 'a+' })
+fs.writeFileSync('run.done', 'failure')
+console.error(e)
+})
+// .finally(() => {
+// if(__runasresult && !__runasresult.skipped) fs.unlinkSync('run.done')
+// });
}
else {
return this.runElevated(batchToRun[0])
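
In win_verse.js the two pieces fit together as follows: elevatedRunner writes run.done only when it is not running as part of a batch, and runElevatedBatch, once it is actually elevated, runs the tasks and writes run.done a single time for the whole batch (or re-requests elevation with the batch otherwise). A simplified sketch; the real code sequences the batch with an any helper and detects/requests elevation via __isElevated and requestElevation, which are stubbed or omitted here:

    var fs = require('fs')

    var sketch_verse = {
        __isElevated : false, // stub; the real flag is computed elsewhere in win_verse.js

        elevatedRunner : function (taskToRun, inBatch) {
            return taskToRun()
                .then(function (r) {
                    fs.writeFileSync('run.log', ', ' + JSON.stringify({ info : taskToRun.info, success : true }), { flag : 'a+' })
                    if (!inBatch) fs.writeFileSync('run.done', 'success') // single-task elevation still signals here
                    return r
                })
                .catch(function (e) {
                    fs.writeFileSync('run.log', ', ' + JSON.stringify(e), { flag : 'a+' })
                    if (!inBatch) fs.writeFileSync('run.done', 'failure')
                    console.error(e)
                })
        },

        runElevatedBatch : function (batchToRun) {
            if (!this.__isElevated) {
                // The real code relaunches an elevated shell that rebuilds and runs the batch;
                // only one elevation request is made regardless of batch size.
                return Promise.reject(new Error('elevation request not covered by this sketch'))
            }
            // Already elevated: run the tasks in order, then write run.done exactly once.
            return batchToRun.reduce(function (chain, t) { return chain.then(t) }, Promise.resolve())
                .then(function (r) { fs.writeFileSync('run.done', 'success'); return r })
                .catch(function (e) { fs.writeFileSync('run.done', 'failure'); console.error(e) })
        }
    }

Since getElevatedTaskInBatch routes through runElevatedInBatch, which calls elevatedRunner with inBatch set to true, batched tasks stop clobbering run.done before the batch has finished, which appears to be the fix the commit title refers to.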
