 package is.hail.backend
 
-import is.hail.asm4s._
-import is.hail.backend.Backend.jsonToBytes
+import is.hail.asm4s.HailClassLoader
 import is.hail.backend.spark.SparkBackend
-import is.hail.expr.ir.{IR, IRParser, LoweringAnalyses, SortField, TableIR, TableReader}
+import is.hail.expr.ir.{IR, LoweringAnalyses, SortField, TableIR, TableReader}
 import is.hail.expr.ir.lowering.{TableStage, TableStageDependency}
 import is.hail.io.{BufferSpec, TypedCodecSpec}
-import is.hail.io.fs._
-import is.hail.io.plink.LoadPlink
-import is.hail.io.vcf.LoadVCF
-import is.hail.types._
+import is.hail.io.fs.FS
+import is.hail.types.RTable
 import is.hail.types.encoded.EType
 import is.hail.types.physical.PTuple
-import is.hail.types.virtual.TFloat64
-import is.hail.utils._
-import is.hail.variant.ReferenceGenome
+import is.hail.utils.ExecutionTimer.Timings
+import is.hail.utils.fatal
 
 import scala.reflect.ClassTag
 
-import java.io._
+import java.io.{Closeable, OutputStream}
 import java.nio.charset.StandardCharsets
 
 import com.fasterxml.jackson.core.StreamReadConstraints
-import org.json4s._
+import org.json4s.JValue
 import org.json4s.jackson.JsonMethods
 import sourcecode.Enclosing
 
@@ -39,16 +35,15 @@ object Backend {
     ctx: ExecuteContext,
     t: PTuple,
     off: Long,
-    bufferSpecString: String,
+    bufferSpec: BufferSpec,
     os: OutputStream,
   ): Unit = {
-    val bs = BufferSpec.parseOrDefault(bufferSpecString)
     assert(t.size == 1)
     val elementType = t.fields(0).typ
     val codec = TypedCodecSpec(
       EType.fromPythonTypeEncoding(elementType.virtualType),
       elementType.virtualType,
-      bs,
+      bufferSpec,
     )
     assert(t.isFieldDefined(off, 0))
     codec.encode(ctx, elementType, t.loadField(off, 0), os)
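
The hunk above narrows the encoding helper in object Backend: it now takes a resolved BufferSpec rather than a spec string, so BufferSpec.parseOrDefault moves to the call sites. Below is a minimal sketch of a caller under the new signature; the helper name encodeToOutputStream and the wrapper writeEncoded are assumptions, since neither name is visible in this hunk.

import java.io.OutputStream

import is.hail.backend.{Backend, ExecuteContext}
import is.hail.io.BufferSpec
import is.hail.types.physical.PTuple

// Illustrative caller, not code from this PR. `encodeToOutputStream` is an
// assumed name for the helper whose signature this hunk changes; the parse
// that used to happen inside it now happens at the call site.
def writeEncoded(ctx: ExecuteContext, t: PTuple, off: Long, specJson: String, os: OutputStream): Unit = {
  val bufferSpec: BufferSpec = BufferSpec.parseOrDefault(specJson)
  Backend.encodeToOutputStream(ctx, t, off, bufferSpec, os)
}
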
@@ -96,8 +91,8 @@ abstract class Backend extends Closeable {
 
   def close(): Unit
 
-  def asSpark(op: String): SparkBackend =
-    fatal(s"${getClass.getSimpleName}: $op requires SparkBackend")
+  def asSpark(implicit E: Enclosing): SparkBackend =
+    fatal(s"${getClass.getSimpleName}: ${E.value} requires SparkBackend")
 
   def shouldCacheQueryInfo: Boolean = true
 
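
With the hunk above, asSpark no longer takes an operation-name string; the error message is built from the implicit sourcecode.Enclosing, whose E.value names the calling scope. A hedged sketch of a call site under the new signature follows; runOnSpark is a hypothetical caller, not code from this PR.

import is.hail.backend.Backend
import is.hail.backend.spark.SparkBackend

// Hypothetical call site: no op string is passed; the compiler materializes a
// sourcecode.Enclosing whose value ("...runOnSpark") is interpolated into the
// fatal() message raised by the base implementation shown above.
def runOnSpark(backend: Backend): SparkBackend =
  backend.asSpark
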
@@ -132,70 +127,7 @@ abstract class Backend extends Closeable {
   def tableToTableStage(ctx: ExecuteContext, inputIR: TableIR, analyses: LoweringAnalyses)
     : TableStage
 
-  def withExecuteContext[T](f: ExecuteContext => T)(implicit E: Enclosing): T
-
-  final def valueType(s: String): Array[Byte] =
-    withExecuteContext { ctx =>
-      jsonToBytes {
-        IRParser.parse_value_ir(ctx, s).typ.toJSON
-      }
-    }
-
-  final def tableType(s: String): Array[Byte] =
-    withExecuteContext { ctx =>
-      jsonToBytes {
-        IRParser.parse_table_ir(ctx, s).typ.toJSON
-      }
-    }
-
-  final def matrixTableType(s: String): Array[Byte] =
-    withExecuteContext { ctx =>
-      jsonToBytes {
-        IRParser.parse_matrix_ir(ctx, s).typ.toJSON
-      }
-    }
-
-  final def blockMatrixType(s: String): Array[Byte] =
-    withExecuteContext { ctx =>
-      jsonToBytes {
-        IRParser.parse_blockmatrix_ir(ctx, s).typ.toJSON
-      }
-    }
-
-  def loadReferencesFromDataset(path: String): Array[Byte]
-
-  def fromFASTAFile(
-    name: String,
-    fastaFile: String,
-    indexFile: String,
-    xContigs: Array[String],
-    yContigs: Array[String],
-    mtContigs: Array[String],
-    parInput: Array[String],
-  ): Array[Byte] =
-    withExecuteContext { ctx =>
-      jsonToBytes {
-        ReferenceGenome.fromFASTAFile(ctx, name, fastaFile, indexFile,
-          xContigs, yContigs, mtContigs, parInput).toJSON
-      }
-    }
-
-  def parseVCFMetadata(path: String): Array[Byte] =
-    withExecuteContext { ctx =>
-      jsonToBytes {
-        val metadata = LoadVCF.parseHeaderMetadata(ctx.fs, Set.empty, TFloat64, path)
-        implicit val formats = defaultJSONFormats
-        Extraction.decompose(metadata)
-      }
-    }
-
-  def importFam(path: String, isQuantPheno: Boolean, delimiter: String, missingValue: String)
-    : Array[Byte] =
-    withExecuteContext { ctx =>
-      LoadPlink.importFamJSON(ctx.fs, path, isQuantPheno, delimiter, missingValue).getBytes(
-        StandardCharsets.UTF_8
-      )
-    }
+  def withExecuteContext[T](f: ExecuteContext => T)(implicit E: Enclosing): (T, Timings)
 
   def execute(ctx: ExecuteContext, ir: IR): Either[Unit, (PTuple, Long)]
 }