// Reuse the application's SparkContext so we can see the jars that were
// shipped with this job (sc.jars).
val sc = SparkContext.getOrCreate()

// Child-first classloader restricted to the listed classes: these
// parquet-hadoop classes are loaded from the application jars rather than
// from Spark's own distribution, so our patched OurCustomParquetOutputFormat
// and the (package-private) collaborators it links against — CodecFactory,
// InternalParquetRecordWriter, etc. — all resolve through the SAME loader.
// NOTE(review): sc.jars entries are assumed to already be well-formed URLs
// (e.g. "file:/..." / "http:/..."); a bare filesystem path would make
// `new URL(_)` throw MalformedURLException — confirm against how jars are
// submitted.
val cl = new UserListClassLoader(
  sc.jars.map(new URL(_)).toArray,
  Thread.currentThread.getContextClassLoader,
  List(
    "org.apache.parquet.hadoop.OurCustomParquetOutputFormat",
    "org.apache.parquet.hadoop.CodecFactory",
    "org.apache.parquet.hadoop.ParquetFileWriter",
    "org.apache.parquet.hadoop.ParquetRecordWriter",
    "org.apache.parquet.hadoop.InternalParquetRecordWriter",
    "org.apache.parquet.hadoop.ColumnChunkPageWriteStore",
    "org.apache.parquet.hadoop.MemoryManager"
  )
)

// Reflectively instantiate the output format through the isolating loader.
// getDeclaredConstructor().newInstance() replaces the deprecated
// Class.newInstance(), which propagated checked constructor exceptions
// undeclared; the replacement wraps them in InvocationTargetException.
val format = cl
  .loadClass("org.apache.parquet.hadoop.OurCustomParquetOutputFormat")
  .getDeclaredConstructor()
  .newInstance()
  .asInstanceOf[FileOutputFormat[Void, T]]