@@ -179,9 +179,9 @@ impl RawQueryResult {
                     .collect::<Vec<Result<RecordBatch, ArrowError>>>()
                     .await;

-                return Ok(QueryResult::Arrow(
+                Ok(QueryResult::Arrow(
                     arrow_records.into_iter().map(Result::unwrap).collect(),
-                ));
+                ))
             }
             RawQueryResult::Json(j) => Ok(QueryResult::Json(j)),
             RawQueryResult::Empty => Ok(QueryResult::Empty),
@@ -201,12 +201,11 @@ impl RawQueryResult {
         let batch_stream = bytes_stream.flat_map(|bytes_result| match bytes_result {
             Ok(bytes) => match Self::bytes_to_batches(bytes) {
                 Ok(batches) => futures::stream::iter(batches.into_iter().map(Ok)).boxed(),
-                Err(e) => futures::stream::once(async move { Err(ArrowError::from(e)) }).boxed(),
+                Err(e) => futures::stream::once(async move { Err(e) }).boxed(),
             },
             Err(e) => futures::stream::once(async move {
                 Err(ArrowError::ParseError(format!(
-                    "Unable to parse RecordBatch due to error in bytes stream: {}",
-                    e.to_string()
+                    "Unable to parse RecordBatch due to error in bytes stream: {e}"
                 )))
             })
             .boxed(),
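Aside (not part of the commit): the hunk above leans on two patterns, lifting a single error into a boxed one-item stream with futures::stream::once, and Rust 2021 inline format arguments ({e}), which make the separate e.to_string() argument redundant. A minimal standalone sketch of the same shape follows; the helper name error_to_batch_stream is made up for illustration, and std::io::Error stands in for whatever error type the real bytes stream yields.

use arrow::error::ArrowError;
use arrow::record_batch::RecordBatch;
use futures::stream::{self, BoxStream, StreamExt};

// Fold one upstream error into a boxed stream that yields a single Err item,
// mirroring the outer `Err(e)` arm of the `flat_map` in the hunk above.
// (Sketch only: `std::io::Error` is an assumed stand-in error type.)
fn error_to_batch_stream(e: std::io::Error) -> BoxStream<'static, Result<RecordBatch, ArrowError>> {
    stream::once(async move {
        // `{e}` captures `e` from the enclosing scope via its Display impl,
        // so no explicit `e.to_string()` argument is needed.
        Err(ArrowError::ParseError(format!(
            "Unable to parse RecordBatch due to error in bytes stream: {e}"
        )))
    })
    .boxed()
}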
@@ -486,7 +485,7 @@ impl SnowflakeApi {
         sql: &str,
         enable_streaming: bool,
     ) -> Result<RawQueryResult, SnowflakeApiError> {
-        let mut resp = self
+        let resp = self
             .run_sql::<ExecResponse>(sql, QueryType::ArrowQuery)
             .await?;
         log::debug!("Got query response: {:?}", resp);
@@ -571,7 +570,7 @@ impl SnowflakeApi {

         Ok(resp)
     }
-
+
     fn chunks_to_bytes_stream(&self, data: &QueryExecResponseData) -> RawQueryResult {
         let chunk_urls = data
             .chunks