2018-07-15 21:30:12 +02:00
package de.mrbesen.youtubecrawler ;
import java.io.BufferedWriter ;
import java.io.File ;
import java.io.FileWriter ;
import java.io.IOException ;
import java.io.PrintWriter ;
2018-07-17 13:42:06 +02:00
import java.text.DateFormat ;
import java.text.SimpleDateFormat ;
2018-07-16 23:22:32 +02:00
import java.util.ArrayList ;
2018-07-17 13:42:06 +02:00
import java.util.Date ;
2018-07-15 21:30:12 +02:00
import java.util.LinkedList ;
import java.util.List ;
2018-07-23 12:27:51 +02:00
import java.util.NoSuchElementException ;
2018-07-15 21:30:12 +02:00
import java.util.Scanner ;
import org.apache.log4j.Logger ;
/**
 * Core crawler: owns the global work queues, distributes jobs to the
 * CrawlerThread workers, and periodically persists found video ids and
 * metadata to the DB.
 */
public class Crawler implements Runnable {

	private int jobspeerthread = 100; // the amount of jobs a thread gets per request
	int requestlimit = 5; // amount of videos left in the todo queue of a thread until it requests new videos
	private int idlecount = 5; // amount of idle loops allowed per main-loop pass

	private LinkedList<String> toSave = new LinkedList<>(); // all found ytids which still need to be analysed
	private LinkedList<String> toCrawl = new LinkedList<>(); // all videos to crawl
	//private LinkedList<String> toknown = new LinkedList<>();//list with all videos, to test if they are allready known, if not they are moved to tocrawle

	private List<CrawlerThread> threads; // list of all worker threads
	private List<CrawlerThread> requested = new LinkedList<>(); // workers waiting for new jobs

	// NOTE(review): SimpleDateFormat is not thread-safe; this instance appears to be
	// used only from the crawler main loop — confirm before sharing it further.
	private static DateFormat dateform = new SimpleDateFormat("dd-MM-yyyy HH:mm:ss");
	private long start; // crawl start time in epoch millis, set at the top of run()

	private boolean crawl = true; // main-loop flag, cleared by stop()
	private int crawlcount = 0; // total number of videos crawled so far
	//private int updateOffset = 0;

	private DB db = new DB();
	private YoutubeAPI api = new YoutubeAPI();
	private File crawlfile = new File("crawl.txt"); // on-disk persistence of the toCrawl queue
	private Logger log = Logger.getLogger(this.getClass().getName());
	private Profiler profiler = new Profiler();

	private int startup = 2; //to keep the beginning cool - counter how often the program is allowed to enter startup sleep
public Crawler ( ) {
2018-07-23 12:27:51 +02:00
try {
jobspeerthread = Integer . parseInt ( Config . prop . getProperty ( " crawler.jobspeerthread " ) ) ;
} catch ( NumberFormatException e ) {
log . warn ( " could not read the number \" " + Config . prop . getProperty ( " crawler.jobspeerthread " ) + " \" from the config file. maxvideo " ) ;
jobspeerthread = 100 ;
}
2018-10-12 17:22:36 +02:00
try {
requestlimit = Integer . parseInt ( Config . prop . getProperty ( " crawler.requestlimit " ) ) ;
} catch ( NumberFormatException e ) {
log . warn ( " could not read the number \" " + Config . prop . getProperty ( " crawler.requestlimit " ) + " \" from the config file. crawler.requestlimit " ) ;
}
try {
idlecount = Integer . parseInt ( Config . prop . getProperty ( " crawler.idlecount " ) ) ;
} catch ( NumberFormatException e ) {
log . warn ( " could not read the number \" " + Config . prop . getProperty ( " crawler.idlecount " ) + " \" from the config file. crawler.idlecount " ) ;
}
2018-07-16 23:22:32 +02:00
}
2018-07-15 21:30:12 +02:00
public void stop ( ) {
crawl = false ;
}
2018-07-17 13:42:06 +02:00
public synchronized void addtoCrawl ( String videoid ) {
2018-10-11 15:32:19 +02:00
//if(! (toCrawl.contains(videoid) || toknown.contains(videoid)))
//toknown.add(videoid);
if ( toCrawl . contains ( videoid ) ) {
2018-11-13 14:49:04 +01:00
ArrayList < String > str = new ArrayList < String > ( 1 ) ;
2018-10-11 15:32:19 +02:00
str . add ( videoid ) ;
2018-11-14 11:39:21 +01:00
db . storeTemp ( str , false ) ;
2018-10-11 15:32:19 +02:00
}
2018-07-15 21:30:12 +02:00
}
2018-07-16 23:22:32 +02:00
public boolean isCrawling ( ) {
return crawl ;
}
public void request ( CrawlerThread t ) {
if ( ! toCrawl . isEmpty ( ) ) {
send ( t ) ;
} else {
requested . add ( t ) ;
2018-07-15 21:30:12 +02:00
}
}
2018-07-20 19:57:43 +02:00
2018-07-16 23:22:32 +02:00
private void send ( CrawlerThread t ) {
2018-07-23 12:27:51 +02:00
// listlock.writeLock().lock();
2018-07-16 23:22:32 +02:00
for ( int i = 0 ; i < jobspeerthread & & ! toCrawl . isEmpty ( ) ; i + + ) {
t . todo . add ( toCrawl . removeFirst ( ) ) ;
}
2018-07-23 12:27:51 +02:00
// listlock.writeLock().unlock();
2018-07-16 23:22:32 +02:00
t . requested = false ;
}
2018-07-15 21:30:12 +02:00
@Override
public void run ( ) {
2018-10-11 15:32:19 +02:00
profiler . profilingEnabled = true ;
profiler . clearProfiling ( ) ;
profiler . startSection ( " root " ) ;
profiler . startSection ( " startup " ) ;
profiler . startSection ( " loadingcrawlfile " ) ;
2018-07-19 20:34:11 +02:00
start = System . currentTimeMillis ( ) ;
2018-07-15 21:30:12 +02:00
log . info ( " Try to load crawlfile " ) ;
if ( crawlfile . exists ( ) ) {
try {
Scanner in = new Scanner ( crawlfile ) ;
2018-10-11 15:32:19 +02:00
//boolean crawl = true;//section of file
2018-07-15 21:30:12 +02:00
while ( in . hasNextLine ( ) ) {
String line = in . nextLine ( ) ;
if ( line = = null ) {
break ;
} else {
if ( ! line . isEmpty ( ) ) {
2018-07-17 13:42:06 +02:00
if ( line . equals ( " - " ) ) { //section delimiter
2018-10-11 15:32:19 +02:00
continue ;
2018-07-17 13:42:06 +02:00
} else {
2018-10-11 15:32:19 +02:00
//if(crawl) {
toCrawl . add ( line ) ;
/ * } else {
2018-07-17 13:42:06 +02:00
toknown . add ( line ) ;
2018-10-11 15:32:19 +02:00
} * /
2018-07-17 13:42:06 +02:00
}
2018-07-15 21:30:12 +02:00
}
}
}
in . close ( ) ;
} catch ( IOException e ) {
log . warn ( " Error while loading crawl file. " ) ;
e . printStackTrace ( ) ;
}
}
2018-10-12 17:22:36 +02:00
profiler . endStartSection ( " populateThreads " ) ; //loading crawlfile closed
2018-07-16 23:22:32 +02:00
//populate threads
int threadcount = 4 ;
try {
threadcount = Integer . parseInt ( Config . prop . getProperty ( " crawler.threadcount " ) ) ;
} catch ( NumberFormatException e ) {
log . warn ( " Could not read the Number \" " + Config . prop . getProperty ( " crawler.threadcount " ) + " \" from the Config. " ) ;
}
threads = new ArrayList < > ( threadcount ) ;
2018-07-20 19:57:43 +02:00
2018-07-16 23:22:32 +02:00
for ( int i = 0 ; i < threadcount ; i + + ) {
CrawlerThread thr = new CrawlerThread ( this ) ;
2018-09-11 12:12:51 +02:00
thr . setThread ( new Thread ( thr , " Crawler # " + i ) ) ;
2018-07-16 23:22:32 +02:00
threads . add ( thr ) ;
2018-09-11 12:12:51 +02:00
thr . thread . start ( ) ;
2018-07-16 23:22:32 +02:00
}
2018-10-12 17:22:36 +02:00
profiler . endStartSection ( " deleteDouble " ) ; //populate threads
2018-09-11 12:12:51 +02:00
long lastdoubledelete = System . currentTimeMillis ( ) ;
2018-10-11 15:32:19 +02:00
//db.deleteDouble();
2018-10-12 17:22:36 +02:00
profiler . endSection ( ) ; //deletedouble
2018-10-11 15:32:19 +02:00
profiler . endSection ( ) ; //startup
boolean savedall = false ; //ein 2. durch lauf, um wirklich alles zu speichern
while ( crawl | | savedall ) {
2018-10-12 17:22:36 +02:00
profiler . startSection ( " main " ) ;
2018-10-11 15:32:19 +02:00
log . info ( " to Crawl: " + toCrawl . size ( ) + /*" known: " + toknown.size() +*/ " Time: " + dateform . format ( new Date ( ) ) ) ;
if ( ! crawl )
savedall = true ;
2018-09-11 00:05:10 +02:00
try {
//fullfill request
2018-10-11 15:32:19 +02:00
profiler . startSection ( " fullfill request " ) ;
2018-09-11 00:05:10 +02:00
while ( ! requested . isEmpty ( ) & & ! toCrawl . isEmpty ( ) & & crawl ) {
log . info ( " fullfill request " ) ;
send ( requested . remove ( 0 ) ) ;
2018-07-19 20:34:11 +02:00
}
2018-07-20 19:57:43 +02:00
2018-09-11 00:05:10 +02:00
//kindof idle
2018-10-12 17:22:36 +02:00
{
int count = 0 ; //donst stay to long in idle!
profiler . endStartSection ( " idle " ) ;
while ( toCrawl . size ( ) > ( jobspeerthread * threads . size ( ) * 2 ) & & crawl & & requested . isEmpty ( ) & & count < idlecount ) {
count + + ;
startup = 0 ; //stop startup count
if ( ( System . currentTimeMillis ( ) - lastdoubledelete ) / 1000 > 1800 ) {
//db.deleteDouble();
lastdoubledelete = System . currentTimeMillis ( ) ;
} else {
Thread . yield ( ) ;
try {
Thread . sleep ( 100 ) ;
} catch ( InterruptedException ignored ) {
break ;
}
2018-09-11 12:12:51 +02:00
}
2018-10-12 17:22:36 +02:00
// updateDB();
2018-07-16 20:12:20 +02:00
}
2018-09-11 00:05:10 +02:00
}
//nothing left?
2018-10-11 15:32:19 +02:00
if ( /*toknown.isEmpty() && */ toCrawl . isEmpty ( ) & & requested . size ( ) = = threads . size ( ) ) { //very uncommon
2018-09-11 00:05:10 +02:00
log . warn ( " nothing left to crawl " ) ;
2018-07-16 23:22:32 +02:00
}
2018-07-20 19:57:43 +02:00
2018-09-11 00:05:10 +02:00
//refil the tocrawl list.
2018-10-11 15:32:19 +02:00
/ * if ( ! toknown . isEmpty ( ) ) {
2018-09-11 00:05:10 +02:00
//check in db for known videos
log . info ( " Checking the DB " ) ;
currentstate = " get new tocrawl " ;
// listlock.writeLock().lock();
while ( toCrawl . size ( ) < jobspeerthread * threads . size ( ) * 2 & & crawl & & ! toknown . isEmpty ( ) ) {
LinkedList < String > tocheck = new LinkedList < > ( ) ;
for ( int i = 0 ; i < toknown . size ( ) & & i < maxvideostotest ; i + + ) {
tocheck . add ( toknown . removeFirst ( ) ) ;
}
toCrawl . addAll ( db . checkvideos ( tocheck ) ) ;
}
// listlock.writeLock().unlock();
2018-10-11 15:32:19 +02:00
}
while ( toknown . size ( ) < threadcount * jobspeerthread * 20 & & crawl ) {
2018-09-11 00:05:10 +02:00
currentstate = " restoretemp " ;
log . info ( " restoreTemp " ) ;
LinkedList < String > rest = db . restoreTemp ( ) ;
toknown . addAll ( rest ) ;
2018-10-11 15:32:19 +02:00
} * /
{
profiler . endStartSection ( " loadCrawl " ) ;
boolean joined = true ;
while ( toCrawl . size ( ) < ( threadcount * jobspeerthread * 3 ) & & crawl ) {
if ( joined ) {
joined = false ;
log . info ( " loadCrawl " ) ;
}
LinkedList < String > rest = db . restoreTemp ( ) ;
toCrawl . addAll ( rest ) ;
}
2018-07-15 21:30:12 +02:00
}
2018-09-11 00:05:10 +02:00
//writing crawlfile
2018-10-11 15:32:19 +02:00
profiler . endStartSection ( " writingcrawlfile " ) ;
2018-09-11 00:05:10 +02:00
log . info ( " Writing Crawlfile " ) ;
try {
PrintWriter p = new PrintWriter ( new BufferedWriter ( new FileWriter ( crawlfile ) ) ) ;
for ( String t : toCrawl ) {
p . println ( t ) ;
}
p . close ( ) ;
} catch ( IOException e ) {
log . error ( " Error writing crawlfile. " , e ) ;
2018-07-17 13:42:06 +02:00
}
2018-07-20 19:57:43 +02:00
2018-09-11 00:05:10 +02:00
//get reports
2018-10-11 15:32:19 +02:00
profiler . endStartSection ( " getreport " ) ;
2018-09-11 00:05:10 +02:00
log . info ( " get report " ) ;
for ( CrawlerThread crawlerThread : threads ) {
2018-10-11 15:32:19 +02:00
String threadname = crawlerThread . thread . getName ( ) ;
profiler . startSection ( " T " + threadname . substring ( threadname . lastIndexOf ( '#' ) + 1 ) ) ;
2018-09-11 00:05:10 +02:00
LinkedList < String > [ ] report = crawlerThread . report ( ) ;
crawlcount + = report [ 0 ] . size ( ) ;
toSave . addAll ( report [ 0 ] ) ;
crawlerThread . crawled . clear ( ) ;
2018-10-11 15:32:19 +02:00
int count = 0 ;
2018-09-11 12:12:51 +02:00
while ( report [ 1 ] . size ( ) > 1 ) { //2 videos werden ggf. gelöscht ohne gesehen zu werden.
2018-11-13 14:49:04 +01:00
ArrayList < String > store = null ;
2018-09-11 00:05:10 +02:00
try {
2018-10-11 15:32:19 +02:00
if ( report [ 1 ] . size ( ) < = 50 ) {
2018-11-13 14:49:04 +01:00
store = new ArrayList < > ( report [ 1 ] ) ;
2018-10-11 15:32:19 +02:00
count + = report [ 1 ] . size ( ) ;
report [ 1 ] . clear ( ) ;
} else {
2018-11-13 14:49:04 +01:00
store = new ArrayList < > ( report [ 1 ] . subList ( 0 , 50 ) ) ;
report [ 1 ] . removeAll ( store ) ;
count + = 50 ;
2018-09-11 00:05:10 +02:00
}
} catch ( NoSuchElementException ignored ) { //concurrentmodification fuckery
log . info ( " no suchelement bla " ) ;
2018-07-23 12:27:51 +02:00
}
2018-11-14 11:39:21 +01:00
db . storeTemp ( store , false ) ;
2018-07-23 12:27:51 +02:00
}
2018-10-12 20:03:53 +02:00
log . info ( count + " videos added from " + threadname ) ;
2018-10-11 15:32:19 +02:00
profiler . endSection ( ) ;
2018-07-23 12:27:51 +02:00
}
2018-07-16 23:22:32 +02:00
2018-10-11 15:32:19 +02:00
profiler . endStartSection ( " debug " ) ;
2018-09-11 00:05:10 +02:00
long runtimes = ( System . currentTimeMillis ( ) - start ) / 1000 ;
if ( runtimes < 0 )
runtimes = 1 ;
float vidps = ( crawlcount / ( float ) runtimes ) ; //videos per second
Main . getMain ( ) . broadcastAdmin ( vidps + " v/s " + crawlcount + " total V " ) ;
//save to db
2018-10-11 15:32:19 +02:00
profiler . endStartSection ( " save2DB " ) ;
2018-09-11 00:05:10 +02:00
log . info ( " save " + toSave . size ( ) + " videos to DB. " ) ;
while ( ! toSave . isEmpty ( ) ) {
LinkedList < String > videoids = new LinkedList < > ( ) ;
for ( int i = 0 ; i < 50 & & ! toSave . isEmpty ( ) ; i + + ) {
videoids . add ( toSave . remove ( 0 ) ) ;
}
if ( videoids . size ( ) > 0 ) {
2018-10-11 15:32:19 +02:00
profiler . startSection ( " getinfo " ) ;
2018-10-12 17:22:36 +02:00
ArrayList < Video > videos = ( ArrayList < Video > ) api . getInfos ( videoids ) [ 0 ] ;
2018-10-11 15:32:19 +02:00
profiler . endStartSection ( " sendtoDB " ) ;
2018-10-12 17:22:36 +02:00
db . addVideos ( videos , false ) ;
2018-10-12 20:03:53 +02:00
profiler . endSection ( ) ; //sendtoDB
2018-09-11 00:05:10 +02:00
}
2018-07-15 21:30:12 +02:00
}
2018-10-12 17:22:36 +02:00
profiler . endSection ( ) ; //save2DB
2018-07-20 19:57:43 +02:00
2018-09-11 00:05:10 +02:00
//at the beginning there is maybe just one video to crawl, so keep it calm.
if ( startup > 0 ) {
2018-10-11 15:32:19 +02:00
profiler . startSection ( " startupsleep " ) ;
2018-09-11 00:05:10 +02:00
startup - - ;
log . info ( " startup sleep " ) ;
try {
Thread . sleep ( 2000 ) ;
} catch ( InterruptedException e ) { }
2018-10-11 15:32:19 +02:00
finally {
2018-10-12 17:22:36 +02:00
profiler . endSection ( ) ; //startupsleep
2018-10-11 15:32:19 +02:00
}
2018-09-11 00:05:10 +02:00
}
} catch ( Throwable t ) {
log . warn ( " exception in Crawler! " , t ) ;
StringBuilder sb = new StringBuilder ( ) ;
for ( StackTraceElement elem : t . getStackTrace ( ) ) {
sb . append ( elem . getFileName ( ) + " ( " ) . append ( elem . getMethodName ( ) + " : " ) . append ( elem . getLineNumber ( ) + " ) \ n " ) ;
}
Main . getMain ( ) . broadcastAdmin ( " Excpetion in crawler: " + t . toString ( ) + " \ n " + sb . toString ( ) ) ;
crawl = false ;
Main . getMain ( ) . stop ( ) ;
2018-07-16 23:22:32 +02:00
}
2018-10-12 17:22:36 +02:00
profiler . endSection ( ) ; //main
2018-07-15 21:30:12 +02:00
}
2018-11-14 11:39:21 +01:00
profiler . startSection ( " waitforthreads " ) ;
for ( CrawlerThread ct : threads ) {
try {
ct . thread . join ( ) ;
} catch ( InterruptedException ignore ) { }
}
log . info ( " All Threads Terminated. " ) ;
profiler . endStartSection ( " insertback " ) ;
ArrayList < String > putback = new ArrayList < > ( threadcount * threads . get ( 0 ) . undoneSize ( ) ) ; //create list with approximated size
for ( CrawlerThread ct : threads ) {
putback . addAll ( ct . undone ( ) ) ;
}
db . storeTemp ( putback , true ) ;
profiler . endSection ( ) ; //insertback
2018-10-11 15:32:19 +02:00
profiler . endSection ( ) ; //root
log . info ( " Profiler: " ) ;
for ( String s : profiler . getTreeView ( ) ) {
log . info ( s ) ;
}
2018-07-16 23:22:32 +02:00
//end
long runtimes = ( System . currentTimeMillis ( ) - start ) / 1000 ;
2018-07-19 20:34:11 +02:00
if ( runtimes < 0 )
runtimes = 1 ;
2018-07-16 23:22:32 +02:00
int runtimem = ( int ) ( runtimes / 60 ) ;
2018-07-17 13:42:06 +02:00
float vidps = ( crawlcount / ( float ) runtimes ) ; //videos per second
log . info ( " Crawling Stopped. Runtime: " + runtimem + " min and " + crawlcount + " videos crawled. ( " + vidps + " v/s ) " ) ;
2018-10-11 15:32:19 +02:00
Main . getMain ( ) . stopcallback ( ) ;
2018-07-15 21:30:12 +02:00
}
2018-07-19 17:59:26 +02:00
public DB getDB ( ) {
return db ;
}
2018-07-20 19:57:43 +02:00
2018-07-15 21:30:12 +02:00
public static Video getVideo ( ) {
return new Video ( ) ;
}
2018-09-11 00:05:10 +02:00
public String getStats ( ) {
2018-07-19 20:34:11 +02:00
long runtimes = ( System . currentTimeMillis ( ) - start ) / 1000 ;
if ( runtimes < 0 )
runtimes = 1 ;
float vidps = ( crawlcount / ( float ) runtimes ) ; //videos per second
2018-09-11 00:05:10 +02:00
int runtimem = ( int ) ( runtimes / 60 ) ;
2018-07-23 12:27:51 +02:00
String out = " " ;
out + = " ToCrawl: " + toCrawl . size ( ) ;
2018-10-11 15:32:19 +02:00
//out += "\nToknown: " + toknown.size();
2018-07-23 12:27:51 +02:00
out + = " \ nToSave: " + toSave . size ( ) ;
out + = " \ nrequested: " + requested . size ( ) ;
2018-10-04 22:52:18 +02:00
out + = " \ nRandomBuffer: " + db . getRandomCount ( ) ;
2018-07-23 12:27:51 +02:00
out + = " \ nRuntime: " + runtimem + " min and " + crawlcount + " videos crawled. ( " + vidps + " v/s ) " ;
2018-10-11 15:32:19 +02:00
out + = " \ nprofiler: " + profiler . getNameOfLastSection ( ) ;
2018-10-08 11:22:45 +02:00
out + = " \ nDBSize: " + db . getDBSize ( ) ;
2018-10-11 15:32:19 +02:00
if ( threads ! = null ) {
out + = " \ nThread Nr, todo size, requested, crawledsize, foundsize " ;
for ( int i = 0 ; i < threads . size ( ) ; i + + ) {
CrawlerThread thre = threads . get ( i ) ;
out + = " \ n " + i + " " + ( thre . lockforreport ? " \ uD83D \ uDD12 " : " \ uD83D \ uDD13 " ) + " " + thre . todo . size ( ) + " " + thre . requested + " " + thre . crawled . size ( ) + " " + thre . found . size ( ) ;
}
2018-07-23 12:27:51 +02:00
}
return out ;
}
2018-10-11 15:32:19 +02:00
public LinkedList < String > getProfiling ( ) {
return profiler . getTreeView ( ) ;
}
2018-07-23 12:27:51 +02:00
/**
 * Updates old entries of the DB. Currently unused.
 */
2018-10-12 17:22:36 +02:00
/ *
2018-07-23 12:27:51 +02:00
private void updateDB ( ) {
log . info ( " updating DB Offset= " + updateOffset ) ;
LinkedList < String > vids = db . getUncompleted ( 50 , updateOffset ) ;
LinkedList < Video > [ ] infos = api . getInfos ( vids ) ;
if ( infos ! = null ) {
int size = infos [ 0 ] . size ( ) + infos [ 1 ] . size ( ) ;
if ( size < 50 ) {
updateOffset + = ( ( 50 - size ) / 2 ) + 1 ;
}
if ( infos [ 1 ] . size ( ) > 0 ) {
log . info ( " delete " + infos [ 1 ] . size ( ) + " livestreams " ) ;
db . removeVideos ( infos [ 1 ] ) ;
}
db . updateVideos ( infos [ 0 ] ) ;
log . info ( " Updated " + infos [ 0 ] . size ( ) + " Videos. " ) ;
2018-07-20 19:57:43 +02:00
}
2018-07-19 20:34:11 +02:00
}
2018-10-12 17:22:36 +02:00
* /
2018-07-19 20:34:11 +02:00
2018-07-15 21:30:12 +02:00
public static class Video {
2018-07-20 19:57:43 +02:00
String id = " " ;
String title = " " ;
String channel = " " ;
String tags = " " ;
int length = 0 ; //the length of the video in seconds
String languageCode = " " ;
byte categorie = 0 ;
long created = 0 ;
boolean live = false ;
2018-07-15 21:30:12 +02:00
}
2018-07-19 20:34:11 +02:00
2018-07-15 21:30:12 +02:00
}
2018-11-14 11:39:21 +01:00