Index: trunk/extensions/WikiAtHome/ApiWikiAtHome.php |
— | — | @@ -60,12 +60,12 @@ |
61 | 61 | }else{ |
62 | 62 | $job4Client = array(); |
63 | 63 | //unpack the $job_json |
64 | | - $job4Client['job_json'] = json_decode( $job->job_json ) ; |
| 64 | + $job4Client['job_json'] = json_decode( $job->job_json ) ; |
65 | 65 | //we set the job key to job_id _ sha1 |
66 | | - $job4Client['job_key'] = $job->job_id . '_'. sha1( $job->job_json ); |
67 | | - $job4Client['job_title']= $job->title; |
68 | | - $job4Client['job_ns'] = $job->ns; |
69 | | - $job4Client['job_set_id'] = $job->job_set_id; |
| 66 | + $job4Client['job_key'] = $job->job_id . '_'. sha1( $job->job_json ); |
| 67 | + $job4Client['job_title'] = $job->title; |
| 68 | + $job4Client['job_ns'] = $job->ns; |
| 69 | + $job4Client['job_set_id'] = $job->job_set_id; |
70 | 70 | |
71 | 71 | $tTitle = Title::newFromText($job->title, $job->ns); |
72 | 72 | |
— | — | @@ -84,7 +84,7 @@ |
85 | 85 | } |
86 | 86 | } |
87 | 87 | /* |
88 | | - * process the submited job: |
| 88 | + * process the submitted job: |
89 | 89 | */ |
90 | 90 | function doProccessJobKey( $job_key ){ |
91 | 91 | global $wgRequest, $wgUser; |
— | — | @@ -100,6 +100,7 @@ |
101 | 101 | } |
102 | 102 | |
103 | 103 | $jobSet = WahJobManager::getJobSetById( $job->job_set_id ); |
| 104 | + |
104 | 105 | //check if its a valid video ogg file (ffmpeg2theora --info) |
105 | 106 | $uploadedJobFile = $wgRequest->getFileTempname('file'); |
106 | 107 | $mediaMeta = wahGetMediaJsonMeta( $uploadedJobFile ); |
— | — | @@ -131,7 +132,7 @@ |
132 | 133 | //all good so far put it into the derivative temp folder by with each piece as it job_id name |
133 | 134 | //@@todo need to rework this a bit for flattening "sequences" |
134 | 135 | $fTitle = Title::newFromText( $jobSet->set_title, $jobSet->set_namespace ); |
135 | | - $file = RepoGroup::singleton()->getLocalRepo()->newFile( $fTitle ); |
| 136 | + $file = wfLocalFile( $fTitle ); |
136 | 137 | $thumbPath = $file->getThumbPath( $jobSet->set_encodekey ); |
137 | 138 | |
138 | 139 | $destTarget = $thumbPath .'/'. $job->job_order_id . '.ogg'; |
— | — | @@ -163,36 +164,10 @@ |
164 | 165 | if( !move_uploaded_file($uploadedJobFile, $destTarget) ){ |
165 | 166 | return $this->dieUsage( 'Could Not Move The Uploaded File', 'fileerror' ); |
166 | 167 | } |
167 | | - |
| 168 | + //issue the jobDone to the Manager: |
| 169 | + WahJobManager :: updateJobDone($job, $wgUser->getId()); |
168 | 170 | $dbw = &wfGetDb( DB_READ ); |
169 | | - //update the jobqueue table with job done time & user |
170 | | - $dbw->update('wah_jobqueue', |
171 | | - array( |
172 | | - 'job_done_user_id' => $wgUser->getId(), |
173 | | - 'job_done_time' => time() |
174 | | - ), |
175 | | - array( |
176 | | - 'job_id' => $job_id |
177 | | - ), |
178 | | - __METHOD__, |
179 | | - array( |
180 | | - 'LIMIT' => 1 |
181 | | - ) |
182 | | - ); |
183 | 171 | |
184 | | - // reduce job_client_count by 1 now that this client is "done" |
185 | | - $dbw->update('wah_jobset', |
186 | | - array( |
187 | | - 'set_client_count = set_client_count -1' |
188 | | - ), |
189 | | - array( |
190 | | - 'set_id' => $jobSet->set_id |
191 | | - ), |
192 | | - __METHOD__, |
193 | | - array( |
194 | | - 'LIMIT' => 1 |
195 | | - ) |
196 | | - ); |
197 | 172 | //check if its the "last" job shell out a Join command |
198 | 173 | $wjm = WahJobManager::newFromSet( $jobSet ); |
199 | 174 | $percDone = $wjm->getDonePerc(); |
— | — | @@ -204,7 +179,7 @@ |
205 | 180 | 'setdone' => false |
206 | 181 | ) |
207 | 182 | ); |
208 | | - }else if( $percDone == 1){ |
| 183 | + }else if( $percDone == 1 ){ |
209 | 184 | //all the files are "done" according to the DB: |
210 | 185 | //make sure all the files exist in the |
211 | 186 | $fileList = array(); |
— | — | @@ -262,18 +237,8 @@ |
263 | 238 | |
264 | 239 | //if the file got created tag the jobset as done: |
265 | 240 | if( is_file( $finalDestTarget )){ |
266 | | - $dbw->update('wah_jobset', |
267 | | - array( |
268 | | - 'set_done_time' => time() |
269 | | - ), |
270 | | - array( |
271 | | - 'set_id' => $jobSet->set_id |
272 | | - ), |
273 | | - __METHOD__, |
274 | | - array( |
275 | | - 'LIMIT' => 1 |
276 | | - ) |
277 | | - ); |
| 241 | + //update jobSet done: |
| 242 | + WahJobManager :: updateSetDone( $jobSet ); |
278 | 243 | //send out stream done |
279 | 244 | return $this->getResult()->addValue( null, $this->getModuleName(), |
280 | 245 | array( |
Index: trunk/extensions/WikiAtHome/WikiAtHome.i18n.php |
— | — | @@ -25,8 +25,8 @@ |
26 | 26 | 'wah-long-general' => '(media file, length $2, $3)', |
27 | 27 | 'wah-long-error' => '(ffmpeg could not read this file: $1)', |
28 | 28 | |
29 | | - 'wah-transcode-working' => 'This video is being transcoded its $1% done', |
30 | | - 'wah-transcode-helpout' => 'You can help transcode this video by visiting [[Special:WikiAtHome|Wiki@Home]]', |
| 29 | + 'wah-transcode-working' => 'This video is being processed, please try again later', |
| 30 | + 'wah-transcode-helpout' => 'The clip is $1 percent done. You can help transcode this video by visiting [[Special:WikiAtHome|Wiki@Home]]', |
31 | 31 | |
32 | 32 | 'wah-transcode-fail' => 'This file failed to transcode.', |
33 | 33 | |
Index: trunk/extensions/WikiAtHome/NonFreeVideoHandler.php |
— | — | @@ -336,12 +336,17 @@ |
337 | 337 | } |
338 | 338 | |
339 | 339 | function toHtml( $options = array() ) { |
| 340 | + global $wgJobTypeConfig; |
| 341 | + |
340 | 342 | wfLoadExtensionMessages( 'WikiAtHome' ); |
341 | 343 | if( $this->percDone == -1){ |
342 | 344 | $waitHtml = wfMsgWikiHtml( 'wah-transcode-fail'); |
343 | 345 | }else{ |
344 | | - $waitHtml = time() . wfMsgWikiHtml( 'wah-transcode-working', $this->percDone ) . "<br>" . |
345 | | - wfMsgWikiHtml('wah-transcode-helpout'); |
| 346 | + $waitHtml = wfMsgWikiHtml( 'wah-transcode-working' ) . "<br>"; |
	| 347 | + //check if we're doing it ~at~ home — in that case we know how done it is: |
| 348 | + if( $wgJobTypeConfig['transcode'][ 'assignAtHome' ] ){ |
| 349 | + $waitHtml += wfMsgWikiHtml('wah-transcode-helpout', $this->percDone); |
| 350 | + } |
346 | 351 | } |
347 | 352 | |
348 | 353 | //@@this is just a placeholder |
Index: trunk/extensions/WikiAtHome/WikiAtHome.php |
— | — | @@ -58,6 +58,7 @@ |
59 | 59 | 'descriptionmsg' => 'wah-desc', |
60 | 60 | ); |
61 | 61 | |
| 62 | + |
62 | 63 | /* |
63 | 64 | * Main WikiAtHome Class hold some constants and config values |
64 | 65 | * |
— | — | @@ -66,15 +67,91 @@ |
67 | 68 | const ENC_SAVE_BANDWITH = '256_200kbs'; |
68 | 69 | const ENC_WEB_STREAM = '400_300kbs'; |
69 | 70 | const ENC_HQ_STREAM = 'high_quality'; |
| 71 | + |
| 72 | + /* |
| 73 | + * the mapping between firefogg api and ffmpeg2theora command line |
| 74 | + * (this way shell command to ffmpeg2theora and firefogg can share a common api) |
| 75 | + * also see: http://firefogg.org/dev/index.html |
| 76 | + */ |
| 77 | + var $foggMap = array( |
| 78 | + //video |
| 79 | + 'width' => "--width", |
| 80 | + 'height' => "--height", |
| 81 | + 'maxSize' => "--max_size", |
| 82 | + 'noUpscaling' => "--no-upscaling", |
| 83 | + 'videoQuality' => "-v", |
| 84 | + 'videoBitrate' => "-V", |
| 85 | + 'framerate' => "-F", |
| 86 | + 'aspect' => "--aspect", |
| 87 | + 'starttime' => "--starttime", |
| 88 | + 'endtime' => "--endtime", |
| 89 | + 'cropTop' => "--croptop", |
| 90 | + 'cropBottom' => "--cropbottom", |
| 91 | + 'cropLeft' => "--cropleft", |
| 92 | + 'cropRight' => "--cropright", |
| 93 | + 'keyframeInterval' => "--key", |
| 94 | + 'denoise' => array("--pp", "de"), |
| 95 | + 'novideo' => array("--novideo", "--no-skeleton"), |
| 96 | + |
| 97 | + //audio |
| 98 | + 'audioQuality' => "-a", |
| 99 | + 'audioBitrate' => "-A", |
| 100 | + 'samplerate' => "-H", |
| 101 | + 'channels' => "-c", |
| 102 | + 'noaudio' => "--noaudio", |
| 103 | + |
| 104 | + //metadata |
| 105 | + 'artist' => "--artist", |
| 106 | + 'title' => "--title", |
| 107 | + 'date' => "--date", |
| 108 | + 'location' => "--location", |
| 109 | + 'organization' => "--organization", |
| 110 | + 'copyright' => "--copyright", |
| 111 | + 'license' => "--license", |
| 112 | + 'contact' => "--contact" |
| 113 | + ); |
70 | 114 | } |
71 | 115 | |
72 | 116 | //GLOBAL FUNCTIONS: |
| 117 | +/* |
| 118 | + * wahDoEncode issues an encode command to ffmpeg2theora |
| 119 | + */ |
| 120 | +function wahDoEncode($source, $target, $encodeSettings ){ |
| 121 | + global $wgffmpeg2theora; |
| 122 | + $cmd = wfEscapeShellArg( $wgffmpeg2theora ) . ' ' . wfEscapeShellArg( $source ); |
| 123 | + $wah = new WikiAtHome(); |
| 124 | + foreach($encodeSettings as $key=>$val){ |
| 125 | + if( isset( $wah->foggMap[$key] ) ){ |
| 126 | + if( is_array( $wah->foggMap[$key] ) ){ |
| 127 | + $cmd.= ' '. implode(' ', $wah->foggMap[$key] ); |
| 128 | + }else if($val == 'true'|| $val===true){ |
| 129 | + $cmd.= ' '. $wah->foggMap[$key]; |
| 130 | + }else if( $val === false){ |
| 131 | + //ignore "false" flags |
| 132 | + }else{ |
| 133 | + //normal get/set value |
| 134 | + $cmd.= ' '. $wah->foggMap[$key] . ' ' . wfEscapeShellArg( $val ); |
| 135 | + } |
| 136 | + } |
| 137 | + } |
| 138 | + //add the output target: |
| 139 | + $cmd.= ' -o ' . wfEscapeShellArg ( $target ); |
73 | 140 | |
| 141 | + wfProfileIn( 'ffmpeg2theora_encode' ); |
| 142 | + wfShellExec( $cmd, $retval ); |
| 143 | + wfProfileOut( 'ffmpeg2theora_encode' ); |
| 144 | + |
| 145 | + if( $retval ){ |
| 146 | + return false; |
| 147 | + } |
| 148 | + return true; |
| 149 | +} |
| 150 | + |
74 | 151 | /* |
75 | 152 | * gets the json metadata from a given file (also validates it as a valid file) |
76 | 153 | */ |
77 | 154 | function wahGetMediaJsonMeta( $path ){ |
78 | | - global $wgffmpeg2theora; |
| 155 | + global $wgffmpeg2theora, $wahFFmpeg2theoraFoggMap; |
79 | 156 | |
80 | 157 | $cmd = wfEscapeShellArg( $wgffmpeg2theora ) . ' ' . wfEscapeShellArg ( $path ). ' --info'; |
81 | 158 | wfProfileIn( 'ffmpeg2theora' ); |
— | — | @@ -116,12 +193,26 @@ |
117 | 194 | //the oggCat path enables server side concatenation of encoded "chunks" |
118 | 195 | $wgOggCat = '/usr/local/bin/oggCat'; |
119 | 196 | |
120 | | -//with oggCat installed then we can do encoding jobs in "chunks" |
| 197 | +//with oggCat installed then we can do jobs in "chunks" |
121 | 198 | //and assemble on the server: (this way no single slow client slows down |
122 | | -//a video job and we can have tighter timeouts) |
| 199 | +//a video job) |
123 | 200 | // $wgChunkDuration is set in seconds: (setting this too low will result in bad encodes) |
124 | 201 | // $wgChunkDuration is only used if we have a valid $wgOggCat install |
125 | | -$wgChunkDuration = '30'; |
| 202 | +$wgJobTypeConfig = array( |
| 203 | + 'transcode' => array( |
| 204 | + //set chunk duration to zero to not split the file |
| 205 | + 'chunkDuration'=> 0, |
| 206 | + // if the api should assign the job on the Special:WikiAtHome page |
| 207 | + // (or via other external api scripts) |
| 208 | + 'assignAtHome' => false, |
| 209 | + 'assignInternal'=> true |
| 210 | + ), |
| 211 | + 'flatten'=> array( |
| 212 | + 'chunkDuration'=> 10, |
| 213 | + 'assignAtHome' => true, |
| 214 | + 'assignInternal' => false |
| 215 | + ) |
| 216 | +); |
126 | 217 | |
127 | 218 | //time interval in seconds between clients asking the server for jobs. |
128 | 219 | $wgClientSearchInterval = 60; |
Index: trunk/extensions/WikiAtHome/WahJobManager.php |
— | — | @@ -120,24 +120,31 @@ |
121 | 121 | * |
122 | 122 | * returns the jobs object or false if no jobs are available |
123 | 123 | */ |
124 | | - static function getNewJob( $jobset_id = false ){ |
125 | | - global $wgNumberOfClientsPerJobSet, $wgJobTimeOut, $wgUser; |
| 124 | + static function getNewJob( $jobset_id = false , $reqMode = 'AtHome'){ |
| 125 | + global $wgNumberOfClientsPerJobSet, $wgJobTimeOut, $wgUser, $wgJobTypeConfig; |
126 | 126 | $dbr = wfGetDb( DB_READ ); |
127 | | - //check if we have jobset |
| 127 | + |
128 | 128 | //its always best to assigning from jobset (since the user already has the data) |
129 | 129 | if( $jobset_id ){ |
130 | | - //try to get one from the current jobset |
131 | | - $job = $dbr->selectRow( 'wah_jobqueue', |
132 | | - '*', |
133 | | - array( |
134 | | - 'job_set_id' => intval( $jobset_id ), |
135 | | - 'job_done_time IS NULL', |
136 | | - 'job_last_assigned_time < '. $dbr->addQuotes( time() - $wgJobTimeOut ) |
137 | | - ), |
138 | | - __METHOD__ |
139 | | - ); |
140 | | - if( $job ){ |
141 | | - return WahJobManager::assignJob( $job ); |
| 130 | + $jobSet = WahJobManager::getJobSetById( $jobset_id ); |
| 131 | + if(!$jobSet) |
| 132 | + return false; //not a valid job_set key (no jobs for you) |
| 133 | + |
| 134 | + //check if the jobset is an accepted job type |
| 135 | + if( WahJobManager::validateJobType( $jobSet->set_job_type, $reqMode) ){ |
| 136 | + //try to get one from the current jobset |
| 137 | + $job = $dbr->selectRow( 'wah_jobqueue', |
| 138 | + '*', |
| 139 | + array( |
| 140 | + 'job_set_id' => intval( $jobset_id ), |
| 141 | + 'job_done_time IS NULL', |
| 142 | + 'job_last_assigned_time < '. $dbr->addQuotes( time() - $wgJobTimeOut ) |
| 143 | + ), |
| 144 | + __METHOD__ |
| 145 | + ); |
| 146 | + if( $job ){ |
| 147 | + return WahJobManager::assignJob( $job ); |
| 148 | + } |
142 | 149 | } |
143 | 150 | } |
144 | 151 | |
— | — | @@ -150,20 +157,46 @@ |
151 | 158 | ), |
152 | 159 | __METHOD__ |
153 | 160 | ); |
154 | | - //re-assign the same job (don't update anything so it can timeout if they keep getting the same job) |
155 | | - if( $job ){ |
156 | | - return WahJobManager::assignJob( $job , false, false); |
| 161 | + if($job){ |
| 162 | + $jobSet = WahJobManager::getJobSetById( $job->job_set_id ); |
	| 163 | + //make sure the job is okay to assign: |
| 164 | + if( WahJobManager::validateJobType( $jobSet->set_job_type, $reqMode) ){ |
| 165 | + //re-assign the same job (don't update anything so it can timeout if they keep getting the same job) |
| 166 | + return WahJobManager::assignJob( $job , false, false); |
| 167 | + } |
157 | 168 | } |
158 | 169 | |
| 170 | + $conditionAry =array( |
| 171 | + 'set_done_time IS NULL', |
| 172 | + 'set_client_count < '. $dbr->addQuotes( $wgNumberOfClientsPerJobSet ) |
| 173 | + ); |
| 174 | + |
| 175 | + //build a request to get a compatible job: |
| 176 | + $okyJobOrList = ''; |
| 177 | + $or = ''; |
| 178 | + foreach($wgJobTypeConfig as $tKey=>$tSet){ |
| 179 | + if( $tSet['assign' . $reqMode] ){ |
| 180 | + $okyJobOrList = $or . ' ( set_job_type = ' . $dbr->addQuotes( $tKey ) . ' )'; |
| 181 | + $or = ' OR '; |
| 182 | + } |
| 183 | + } |
| 184 | + //no valid jobs: |
| 185 | + if( $okyJobOrList=='' ){ |
| 186 | + return false; |
| 187 | + } |
	| 188 | + //else add it to the SQL statement: |
| 189 | + if( $okyJobOrList != '' ){ |
	| 190 | + //restrict the jobset select to job types assignable in this mode |
| 191 | + $conditionAry[] = $okyJobOrList; |
| 192 | + } |
| 193 | + |
159 | 194 | //just do a normal select from jobset |
160 | 195 | $jobSet = $dbr->selectRow( 'wah_jobset', |
161 | 196 | '*', |
162 | | - array( |
163 | | - 'set_done_time IS NULL', |
164 | | - 'set_client_count < '. $dbr->addQuotes( $wgNumberOfClientsPerJobSet ) |
165 | | - ), |
| 197 | + $conditionAry, |
166 | 198 | __METHOD__ |
167 | 199 | ); |
| 200 | + |
168 | 201 | if( !$jobSet ){ |
169 | 202 | //no jobs: |
170 | 203 | return false; |
— | — | @@ -189,6 +222,10 @@ |
190 | 223 | } |
191 | 224 | } |
192 | 225 | } |
| 226 | + static function validateJobType( $reqType, $reqMode ){ |
| 227 | + global $wgJobTypeConfig; |
| 228 | + return $wgJobTypeConfig[ $reqType ][ 'assign' . $reqMode ]; |
| 229 | + } |
193 | 230 | /* |
194 | 231 | * assigns a job: |
195 | 232 | * |
— | — | @@ -265,13 +302,17 @@ |
266 | 303 | * setups up a new job |
267 | 304 | */ |
268 | 305 | function doJobSetup(){ |
269 | | - global $wgChunkDuration, $wgDerivativeSettings; |
| 306 | + global $wgDerivativeSettings, $wgJobTypeConfig; |
270 | 307 | $fname = 'WahJobManager::doJobSetup'; |
271 | 308 | $dbw = &wfGetDb( DB_WRITE ); |
272 | | - //figure out how many sub-jobs we will have: |
273 | | - $length = $this->file->handler->getLength( $this->file ); |
274 | 309 | |
275 | | - $set_job_count = ceil( $length / $wgChunkDuration ); |
| 310 | + if( $wgJobTypeConfig[ $this->getJobTypeKey() ]['chunkDuration'] == 0){ |
| 311 | + $set_job_count = 1; |
| 312 | + }else{ |
| 313 | + //figure out how many sub-jobs we will have: |
| 314 | + $length = $this->file->handler->getLength( $this->file ); |
| 315 | + $set_job_count = ceil( $length / $wgJobTypeConfig[ $jobTypeKey ]['chunkDuration'] ); |
| 316 | + } |
276 | 317 | |
277 | 318 | //first insert the job set |
278 | 319 | $dbw->insert('wah_jobset', |
— | — | @@ -281,35 +322,110 @@ |
282 | 323 | 'set_jobs_count' => $set_job_count, |
283 | 324 | 'set_encodekey' => $this->sEncodeKey, |
284 | 325 | 'set_creation_time' => time() |
285 | | - ),$fname |
| 326 | + ), $fname |
286 | 327 | ); |
287 | 328 | $this->sId = $dbw->insertId(); |
288 | 329 | |
289 | 330 | //generate the job data |
| 331 | + $jobInsertArray = $this->gennerateJobData(); |
| 332 | + |
| 333 | + //now insert the jobInsertArray |
| 334 | + $dbw->insert( 'wah_jobqueue', $jobInsertArray, $fname ); |
| 335 | + } |
| 336 | + function gennerateJobData(){ |
| 337 | + global $wgJobTypeConfig, $wgDerivativeSettings; |
| 338 | + |
| 339 | + $jobTypeKey = $this->getJobTypeKey(); |
| 340 | + |
| 341 | + //set the base encode settings: |
| 342 | + $encSettingsAry = $wgDerivativeSettings[ $this->sEncodeKey ]; |
| 343 | + |
| 344 | + //init the jobs array: |
290 | 345 | $jobInsertArray = array(); |
291 | | - for( $i=0 ; $i < $set_job_count; $i++ ){ |
292 | | - $encSettingsAry = $wgDerivativeSettings[ $this->sEncodeKey ]; |
293 | | - $encSettingsAry['starttime'] = $i * $wgChunkDuration; |
294 | | - //should be oky that the last endtime is > than length |
295 | | - $encSettingsAry['endtime'] = $encSettingsAry['starttime'] + $wgChunkDuration; |
296 | 346 | |
297 | | - $jobJsonAry = array( |
298 | | - 'jobType' => 'transcode', |
299 | | - 'encodeSettings' => $encSettingsAry |
300 | | - ); |
| 347 | + $jobJsonAry = array( |
| 348 | + 'jobType' => $jobTypeKey, |
| 349 | + 'encodeSettings' => $encSettingsAry |
| 350 | + ); |
301 | 351 | |
302 | | - //add starttime and endtime |
| 352 | + //check if we need to split into chunks: (if chunk duration is zero don't split) |
| 353 | + if( $wgJobTypeConfig[ $jobTypeKey ]['chunkDuration'] == 0 ){ |
303 | 354 | $jobInsertArray[] = |
304 | 355 | array( |
305 | 356 | 'job_set_id' => $this->sId, |
306 | | - 'job_order_id' => $i, |
| 357 | + 'job_order_id' => 0, |
307 | 358 | 'job_json' => ApiFormatJson::getJsonEncode( $jobJsonAry ) |
308 | 359 | ); |
| 360 | + }else{ |
| 361 | + for( $i=0 ; $i < $set_job_count; $i++ ){ |
| 362 | + //add starttime and endtime |
| 363 | + $jobJsonAry['encodeSettings']['starttime'] = $i * $wgJobTypeConfig[ $jobTypeKey ]['chunkDuration']; |
	| 364 | + //should be okay if the last endtime is greater than the length |
| 365 | + $jobJsonAry['encodeSettings']['endtime'] = $encSettingsAry['starttime'] + $wgChunkDuration; |
| 366 | + $jobInsertArray[] = |
| 367 | + array( |
| 368 | + 'job_set_id' => $this->sId, |
| 369 | + 'job_order_id' => $i, |
| 370 | + 'job_json' => ApiFormatJson::getJsonEncode( $jobJsonAry ) |
| 371 | + ); |
| 372 | + } |
309 | 373 | } |
310 | | - //now insert the jobInsertArray |
311 | | - $dbw->insert( 'wah_jobqueue', $jobInsertArray, $fname ); |
| 374 | + return $jobInsertArray; |
312 | 375 | } |
| 376 | + function getJobTypeKey(){ |
	| 377 | + //if it's in the file namespace, the job is a transcode |
| 378 | + if($this->sNamespace == NS_FILE){ |
| 379 | + return 'transcode'; |
| 380 | + } |
	| 381 | + //if it's in the "sequence" namespace, then it's a flatten job |
| 382 | + } |
| 383 | + function updateSetDone($jobSet, $user_id=0){ |
| 384 | + $dbw = &wfGetDb( DB_WRITE ); |
| 385 | + $dbw->update('wah_jobset', |
| 386 | + array( |
| 387 | + 'set_done_time' => time() |
| 388 | + ), |
| 389 | + array( |
| 390 | + 'set_id' => $jobSet->set_id |
| 391 | + ), |
| 392 | + __METHOD__, |
| 393 | + array( |
| 394 | + 'LIMIT' => 1 |
| 395 | + ) |
| 396 | + ); |
| 397 | + } |
| 398 | + function updateJobDone(&$job, $user_id=0){ |
| 399 | + $dbw = &wfGetDb( DB_WRITE ); |
| 400 | + //update the jobqueue table with job done time & user |
| 401 | + $dbw->update('wah_jobqueue', |
| 402 | + array( |
| 403 | + 'job_done_user_id' => $user_id, |
| 404 | + 'job_done_time' => time() |
| 405 | + ), |
| 406 | + array( |
| 407 | + 'job_id' => $job->job_id |
| 408 | + ), |
| 409 | + __METHOD__, |
| 410 | + array( |
| 411 | + 'LIMIT' => 1 |
| 412 | + ) |
| 413 | + ); |
313 | 414 | |
| 415 | + // reduce job_client_count by 1 now that this client is "done" |
| 416 | + $dbw->update('wah_jobset', |
| 417 | + array( |
| 418 | + 'set_client_count = set_client_count -1' |
| 419 | + ), |
| 420 | + array( |
| 421 | + 'set_id' => $job->job_set_id |
| 422 | + ), |
| 423 | + __METHOD__, |
| 424 | + array( |
| 425 | + 'LIMIT' => 1 |
| 426 | + ) |
| 427 | + ); |
| 428 | + } |
| 429 | + |
314 | 430 | } |
315 | 431 | |
316 | 432 | ?> |
\ No newline at end of file |
Index: trunk/extensions/WikiAtHome/internalCmdLineEncoder.php |
— | — | @@ -0,0 +1,83 @@ |
| 2 | +<? |
| 3 | +/* |
	| 4 | + * this encoder is run from the command line and processes "encoding" jobs from the database job queue |
| 5 | + */ |
| 6 | + |
| 7 | +require_once ( '../../maintenance/commandLine.inc' ); |
| 8 | + |
| 9 | +//get the jobset list |
| 10 | +//@@todo input handling |
| 11 | + |
| 12 | +/*if ( count( $args ) == 0 || isset ( $options['help'] ) ) { |
| 13 | + print<<<EOT |
| 14 | +Loads encoding jobs from the database and encodes them. |
| 15 | + internalCmdLineEncoder [options] |
| 16 | + -t [threads] how many transcode threads to run |
| 17 | + --runOnce run once flag has the application run just one time |
| 18 | +EOT; |
| 19 | +die(); |
| 20 | +}*/ |
| 21 | + |
| 22 | +//number of threads (not yet supported) |
| 23 | +$wahNumberOfThreads = 1; |
| 24 | + |
| 25 | +//single run flag |
| 26 | +$wahRunOnce = true; |
| 27 | + |
| 28 | +//delay between job searches |
| 29 | +$wahJobDelay = 10; |
| 30 | + |
| 31 | +$wahStatusOutput = true; |
| 32 | + |
| 33 | +//make sure we have wikiAtHome |
| 34 | +if( !class_exists(WikiAtHome) ){ |
| 35 | + die( ' "WikiAtHome" is required for the internal encoder'); |
| 36 | +} |
| 37 | + |
| 38 | + |
| 39 | +doJobLoop(); |
| 40 | +function doJobLoop(){ |
| 41 | + global $wgJobTypeConfig, $wahJobDelay, $wahRunOnce, $wahStatusOutput; |
| 42 | + |
| 43 | + //look for jobs (sleep for $wahJobDelay if none found) |
| 44 | + $job = WahJobManager :: getNewJob(false, 'Internal'); |
| 45 | + if(!$job && $wahRunOnce == false){ |
| 46 | + if($wahStatusOutput) |
| 47 | + print "no jobs found waiting $wahJobDelay \n"; |
| 48 | + sleep($wahJobDelay); |
| 49 | + return doJobLoop(); |
| 50 | + }else if(!$job && $wahRunOnce == true){ |
| 51 | + if($wahStatusOutput) |
| 52 | + print "no job found \n"; |
| 53 | + return ; |
| 54 | + } |
| 55 | + |
| 56 | + $jobSet = WahJobManager ::getJobSetById( $job->job_set_id ); |
| 57 | + $jobDetails = json_decode( $job->job_json ) ; |
| 58 | + |
| 59 | + //get the title (so we can access the source file) |
| 60 | + $fTitle = Title::newFromText( $job->title, $job->ns ); |
| 61 | + $file = wfLocalFile( $fTitle ); |
| 62 | + $thumbPath = $file->getThumbPath( $jobSet->set_encodekey ); |
| 63 | + //make sure the directory is ready: |
| 64 | + wfMkdirParents( $thumbPath ); |
| 65 | + |
| 66 | + $destTarget = $thumbPath . '.ogg'; |
| 67 | + //issue the encoding command |
| 68 | + if($wahStatusOutput) print "Running Encode Command...\n"; |
| 69 | + wahDoEncode($file->getPath(), $destTarget, $jobDetails->encodeSettings ); |
| 70 | + |
| 71 | + //once done with encode update the status: |
| 72 | + WahJobManager :: updateJobDone($job); |
| 73 | + //update set done (if only one item in the set) |
| 74 | + $wjm = WahJobManager::newFromSet( $jobSet ); |
| 75 | + $percDone = $wjm->getDonePerc(); |
| 76 | + if( $percDone == 1 ){ |
| 77 | + WahJobManager :: updateSetDone( $jobSet ); |
| 78 | + }else{ |
| 79 | + if($wahStatusOutput) |
| 80 | + print "job not complete? (might be mixing chunkDuration types?) "; |
| 81 | + } |
| 82 | +} |
| 83 | + |
| 84 | +?> |
\ No newline at end of file |