Skip to content

Commit 4ba1c67

Browse files
committed
[DO NOT MERGE][skip ci] JAVA 17 BWARE COMMIT
1 parent fd1ba7c commit 4ba1c67

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

43 files changed

+1655
-417
lines changed

bin/systemds

+4
Original file line numberDiff line numberDiff line change
@@ -413,6 +413,7 @@ if [ $WORKER == 1 ]; then
413413
print_out "# starting Federated worker on port $PORT"
414414
CMD=" \
415415
java $SYSTEMDS_STANDALONE_OPTS \
416+
--add-modules=jdk.incubator.vector \
416417
$LOG4JPROPFULL \
417418
-jar $SYSTEMDS_JAR_FILE \
418419
-w $PORT \
@@ -422,6 +423,7 @@ elif [ "$FEDMONITORING" == 1 ]; then
422423
print_out "# starting Federated backend monitoring on port $PORT"
423424
CMD=" \
424425
java $SYSTEMDS_STANDALONE_OPTS \
426+
--add-modules=jdk.incubator.vector \
425427
$LOG4JPROPFULL \
426428
-jar $SYSTEMDS_JAR_FILE \
427429
-fedMonitoring $PORT \
@@ -433,6 +435,7 @@ elif [ $SYSDS_DISTRIBUTED == 0 ]; then
433435
CMD=" \
434436
java $SYSTEMDS_STANDALONE_OPTS \
435437
$LOG4JPROPFULL \
438+
--add-modules=jdk.incubator.vector \
436439
-jar $SYSTEMDS_JAR_FILE \
437440
-f $SCRIPT_FILE \
438441
-exec $SYSDS_EXEC_MODE \
@@ -442,6 +445,7 @@ else
442445
print_out "# Running script $SCRIPT_FILE distributed with opts: $*"
443446
CMD=" \
444447
spark-submit $SYSTEMDS_DISTRIBUTED_OPTS \
448+
--add-modules=jdk.incubator.vector \
445449
$SYSTEMDS_JAR_FILE \
446450
-f $SCRIPT_FILE \
447451
-exec $SYSDS_EXEC_MODE \

pom.xml

+9-3
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@
6767
<!-- aws-java-sdk-bundle version should align with hadoop-aws version -->
6868
<!-- aws-java-sdk-bundle.version>1.12.367</aws-java-sdk-bundle.version -->
6969
<!-- Set java compile level via argument, ex: 1.8 1.9 10 11-->
70-
<java.level>11</java.level>
70+
<java.level>17</java.level>
7171
<java.version>{java.level}</java.version>
7272
<!-->Testing settings<!-->
7373
<maven.test.skip>false</maven.test.skip>
@@ -77,6 +77,7 @@
7777
<test-forkCount>1C</test-forkCount>
7878
<rerun.failing.tests.count>2</rerun.failing.tests.count>
7979
<jacoco.skip>false</jacoco.skip>
80+
<doc.skip>false</doc.skip>
8081
<jacoco.include>**</jacoco.include>
8182
<automatedtestbase.outputbuffering>false</automatedtestbase.outputbuffering>
8283
<argLine>-Xms3000m -Xmx3000m -Xmn300m</argLine>
@@ -345,6 +346,9 @@
345346
<source>${java.level}</source>
346347
<target>${java.level}</target>
347348
<release>${java.level}</release>
349+
<compilerArgs>
350+
<arg>--add-modules=jdk.incubator.vector</arg>
351+
</compilerArgs>
348352
</configuration>
349353
</plugin>
350354

@@ -367,6 +371,7 @@
367371
<systemPropertyVariables>
368372
<log4j.configurationFile>file:src/test/resources/log4j.properties</log4j.configurationFile>
369373
</systemPropertyVariables>
374+
<argLine>--add-modules=jdk.incubator.vector</argLine>
370375
</configuration>
371376
</plugin>
372377

@@ -875,9 +880,10 @@
875880
<configuration>
876881
<excludePackageNames>*.protobuf</excludePackageNames>
877882
<notimestamp>true</notimestamp>
878-
<failOnWarnings>true</failOnWarnings>
883+
<failOnWarnings>false</failOnWarnings>
879884
<quiet>true</quiet>
880-
<skip>false</skip>
885+
<additionalJOption>--add-modules=jdk.incubator.vector</additionalJOption>
886+
<skip>${doc.skip}</skip>
881887
<show>public</show>
882888
<source>${java.level}</source>
883889
</configuration>

src/main/java/org/apache/sysds/hops/AggBinaryOp.java

+1-2
Original file line numberDiff line numberDiff line change
@@ -439,8 +439,7 @@ private boolean isApplicableForTransitiveSparkExecType(boolean left)
439439
|| (left && !isLeftTransposeRewriteApplicable(true)))
440440
&& getInput(index).getParent().size()==1 //bagg is only parent
441441
&& !getInput(index).areDimsBelowThreshold()
442-
&& (getInput(index).optFindExecType() == ExecType.SPARK
443-
|| (getInput(index) instanceof DataOp && ((DataOp)getInput(index)).hasOnlyRDD()))
442+
&& getInput(index).hasSparkOutput()
444443
&& getInput(index).getOutputMemEstimate()>getOutputMemEstimate();
445444
}
446445

src/main/java/org/apache/sysds/hops/BinaryOp.java

+24-14
Original file line numberDiff line numberDiff line change
@@ -747,8 +747,8 @@ protected ExecType optFindExecType(boolean transitive) {
747747

748748
checkAndSetForcedPlatform();
749749

750-
DataType dt1 = getInput().get(0).getDataType();
751-
DataType dt2 = getInput().get(1).getDataType();
750+
final DataType dt1 = getInput(0).getDataType();
751+
final DataType dt2 = getInput(1).getDataType();
752752

753753
if( _etypeForced != null ) {
754754
setExecType(_etypeForced);
@@ -796,18 +796,28 @@ else if ( dt1 == DataType.SCALAR && dt2 == DataType.MATRIX ) {
796796
checkAndSetInvalidCPDimsAndSize();
797797
}
798798

799-
//spark-specific decision refinement (execute unary scalar w/ spark input and
799+
// spark-specific decision refinement (execute unary scalar w/ spark input and
800800
// single parent also in spark because it's likely cheap and reduces intermediates)
801-
if(transitive && _etype == ExecType.CP && _etypeForced != ExecType.CP && _etypeForced != ExecType.FED &&
802-
getDataType().isMatrix() // output should be a matrix
803-
&& (dt1.isScalar() || dt2.isScalar()) // one side should be scalar
804-
&& supportsMatrixScalarOperations() // scalar operations
805-
&& !(getInput().get(dt1.isScalar() ? 1 : 0) instanceof DataOp) // input is not checkpoint
806-
&& getInput().get(dt1.isScalar() ? 1 : 0).getParent().size() == 1 // unary scalar is only parent
807-
&& !HopRewriteUtils.isSingleBlock(getInput().get(dt1.isScalar() ? 1 : 0)) // single block triggered exec
808-
&& getInput().get(dt1.isScalar() ? 1 : 0).optFindExecType() == ExecType.SPARK) {
809-
// pull unary scalar operation into spark
810-
_etype = ExecType.SPARK;
801+
if(transitive // we allow transitive Spark operations. continue sequences of spark operations
802+
&& _etype == ExecType.CP // The instruction is currently in CP
803+
&& _etypeForced != ExecType.CP // not forced CP
804+
&& _etypeForced != ExecType.FED // not federated
805+
&& (getDataType().isMatrix() || getDataType().isFrame()) // output should be a matrix or frame
806+
) {
807+
final boolean v1 = getInput(0).isScalarOrVectorBellowBlockSize();
808+
final boolean v2 = getInput(1).isScalarOrVectorBellowBlockSize();
809+
final boolean left = v1 == true; // left side is the vector or scalar
810+
final Hop sparkIn = getInput(left ? 1 : 0);
811+
if((v1 ^ v2) // XOR only one side is allowed to be a vector or a scalar.
812+
&& (supportsMatrixScalarOperations() || op == OpOp2.APPLY_SCHEMA) // supported operation
813+
&& sparkIn.getParent().size() == 1 // only one parent
814+
&& !HopRewriteUtils.isSingleBlock(sparkIn) // single block triggered exec
815+
&& sparkIn.optFindExecType() == ExecType.SPARK // input was spark op.
816+
&& !(sparkIn instanceof DataOp) // input is not checkpoint
817+
) {
818+
// pull operation into spark
819+
_etype = ExecType.SPARK;
820+
}
811821
}
812822

813823
if( OptimizerUtils.ALLOW_BINARY_UPDATE_IN_PLACE &&
@@ -837,7 +847,7 @@ else if( (op == OpOp2.CBIND && getDataType().isList())
837847
|| (op == OpOp2.RBIND && getDataType().isList())) {
838848
_etype = ExecType.CP;
839849
}
840-
850+
841851
//mark for recompile (forever)
842852
setRequiresRecompileIfNecessary();
843853

src/main/java/org/apache/sysds/hops/Hop.java

+11
Original file line numberDiff line numberDiff line change
@@ -1040,6 +1040,12 @@ public final String toString() {
10401040
// ========================================================================================
10411041

10421042

1043+
protected boolean isScalarOrVectorBellowBlockSize(){
1044+
return getDataType().isScalar() || (dimsKnown() &&
1045+
(( _dc.getRows() == 1 && _dc.getCols() < ConfigurationManager.getBlocksize())
1046+
|| _dc.getCols() == 1 && _dc.getRows() < ConfigurationManager.getBlocksize()));
1047+
}
1048+
10431049
protected boolean isVector() {
10441050
return (dimsKnown() && (_dc.getRows() == 1 || _dc.getCols() == 1) );
10451051
}
@@ -1624,6 +1630,11 @@ protected void setMemoryAndComputeEstimates(Lop lop) {
16241630
lop.setComputeEstimate(ComputeCost.getHOPComputeCost(this));
16251631
}
16261632

1633+
protected boolean hasSparkOutput(){
1634+
return (this.optFindExecType() == ExecType.SPARK
1635+
|| (this instanceof DataOp && ((DataOp)this).hasOnlyRDD()));
1636+
}
1637+
16271638
/**
16281639
* Set parse information.
16291640
*

src/main/java/org/apache/sysds/hops/UnaryOp.java

+24-10
Original file line numberDiff line numberDiff line change
@@ -366,7 +366,11 @@ protected double computeOutputMemEstimate( long dim1, long dim2, long nnz )
366366
} else {
367367
sparsity = OptimizerUtils.getSparsity(dim1, dim2, nnz);
368368
}
369-
return OptimizerUtils.estimateSizeExactSparsity(dim1, dim2, sparsity);
369+
370+
if(getDataType() == DataType.FRAME)
371+
return OptimizerUtils.estimateSizeExactFrame(dim1, dim2);
372+
else
373+
return OptimizerUtils.estimateSizeExactSparsity(dim1, dim2, sparsity);
370374
}
371375

372376
@Override
@@ -463,6 +467,13 @@ public boolean isMetadataOperation() {
463467
|| _op == OpOp1.CAST_AS_LIST;
464468
}
465469

470+
private boolean isDisallowedSparkOps(){
471+
return isCumulativeUnaryOperation()
472+
|| isCastUnaryOperation()
473+
|| _op==OpOp1.MEDIAN
474+
|| _op==OpOp1.IQM;
475+
}
476+
466477
@Override
467478
protected ExecType optFindExecType(boolean transitive)
468479
{
@@ -493,19 +504,22 @@ else if ( getInput().get(0).areDimsBelowThreshold() || getInput().get(0).isVecto
493504
checkAndSetInvalidCPDimsAndSize();
494505
}
495506

507+
496508
//spark-specific decision refinement (execute unary w/ spark input and
497509
//single parent also in spark because it's likely cheap and reduces intermediates)
498-
if( _etype == ExecType.CP && _etypeForced != ExecType.CP
499-
&& getInput().get(0).optFindExecType() == ExecType.SPARK
500-
&& getDataType().isMatrix()
501-
&& !isCumulativeUnaryOperation() && !isCastUnaryOperation()
502-
&& _op!=OpOp1.MEDIAN && _op!=OpOp1.IQM
503-
&& !(getInput().get(0) instanceof DataOp) //input is not checkpoint
504-
&& getInput().get(0).getParent().size()==1 ) //unary is only parent
505-
{
510+
if(_etype == ExecType.CP // currently CP instruction
511+
&& _etype != ExecType.SPARK /// currently not SP.
512+
&& _etypeForced != ExecType.CP // not forced as CP instruction
513+
&& getInput(0).hasSparkOutput() // input is a spark instruction
514+
&& (getDataType().isMatrix() || getDataType().isFrame()) // output is a matrix or frame
515+
&& !isDisallowedSparkOps() // is invalid spark instruction
516+
// && !(getInput().get(0) instanceof DataOp) // input is not checkpoint
517+
// && getInput(0).getParent().size() <= 1// unary is only parent
518+
) {
506519
//pull unary operation into spark
507520
_etype = ExecType.SPARK;
508521
}
522+
509523

510524
//mark for recompile (forever)
511525
setRequiresRecompileIfNecessary();
@@ -520,7 +534,7 @@ && getInput().get(0).getParent().size()==1 ) //unary is only parent
520534
} else {
521535
setRequiresRecompileIfNecessary();
522536
}
523-
537+
524538
return _etype;
525539
}
526540

src/main/java/org/apache/sysds/runtime/compress/CompressedMatrixBlock.java

+4-18
Original file line numberDiff line numberDiff line change
@@ -58,6 +58,7 @@
5858
import org.apache.sysds.runtime.compress.lib.CLALibMMChain;
5959
import org.apache.sysds.runtime.compress.lib.CLALibMatrixMult;
6060
import org.apache.sysds.runtime.compress.lib.CLALibMerge;
61+
import org.apache.sysds.runtime.compress.lib.CLALibReorg;
6162
import org.apache.sysds.runtime.compress.lib.CLALibReplace;
6263
import org.apache.sysds.runtime.compress.lib.CLALibReshape;
6364
import org.apache.sysds.runtime.compress.lib.CLALibRexpand;
@@ -72,7 +73,6 @@
7273
import org.apache.sysds.runtime.data.DenseBlock;
7374
import org.apache.sysds.runtime.data.SparseBlock;
7475
import org.apache.sysds.runtime.data.SparseRow;
75-
import org.apache.sysds.runtime.functionobjects.SwapIndex;
7676
import org.apache.sysds.runtime.instructions.InstructionUtils;
7777
import org.apache.sysds.runtime.instructions.cp.CM_COV_Object;
7878
import org.apache.sysds.runtime.instructions.cp.ScalarObject;
@@ -633,21 +633,7 @@ public MatrixBlock replaceOperations(MatrixValue result, double pattern, double
633633

634634
@Override
635635
public MatrixBlock reorgOperations(ReorgOperator op, MatrixValue ret, int startRow, int startColumn, int length) {
636-
if(op.fn instanceof SwapIndex && this.getNumColumns() == 1) {
637-
MatrixBlock tmp = decompress(op.getNumThreads());
638-
long nz = tmp.setNonZeros(tmp.getNonZeros());
639-
tmp = new MatrixBlock(tmp.getNumColumns(), tmp.getNumRows(), tmp.getDenseBlockValues());
640-
tmp.setNonZeros(nz);
641-
return tmp;
642-
}
643-
else {
644-
// Allow transpose to be compressed output. In general we need to have a transposed flag on
645-
// the compressed matrix. https://issues.apache.org/jira/browse/SYSTEMDS-3025
646-
String message = op.getClass().getSimpleName() + " -- " + op.fn.getClass().getSimpleName();
647-
MatrixBlock tmp = getUncompressed(message, op.getNumThreads());
648-
return tmp.reorgOperations(op, ret, startRow, startColumn, length);
649-
}
650-
636+
return CLALibReorg.reorg(this, op, (MatrixBlock) ret, startRow, startColumn, length);
651637
}
652638

653639
public boolean isOverlapping() {
@@ -1215,8 +1201,8 @@ public void examSparsity(boolean allowCSR, int k) {
12151201
}
12161202

12171203
@Override
1218-
public void sparseToDense(int k) {
1219-
// do nothing
1204+
public MatrixBlock sparseToDense(int k) {
1205+
return this; // do nothing
12201206
}
12211207

12221208
@Override

src/main/java/org/apache/sysds/runtime/compress/CompressedMatrixBlockFactory.java

+3
Original file line numberDiff line numberDiff line change
@@ -87,6 +87,9 @@ public class CompressedMatrixBlockFactory {
8787
/** Compression information gathered through the sampling, used for the actual compression decided */
8888
private CompressedSizeInfo compressionGroups;
8989

90+
// /** Indicate if the compression aborts we should decompress*/
91+
// private boolean shouldDecompress = false;
92+
9093
private CompressedMatrixBlockFactory(MatrixBlock mb, int k, CompressionSettingsBuilder compSettings,
9194
ACostEstimate costEstimator) {
9295
this(mb, k, compSettings.create(), costEstimator);

0 commit comments

Comments
 (0)