double[] to org.apache.ignite.ml.math.Vector
No. of Instances - 155
No. of Commits - 2
No. of Projects - ignite
Hierarchy/Composition: -
Primitive Info: -
NameSpace: Jdk -> Internal
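Every mapping below stems from one declaration-level change: feature rows that used to travel through the preprocessing code as raw JDK double[] arrays are now carried as org.apache.ignite.ml.math.Vector instances (an Ignite-internal type, hence "Jdk -> Internal"). A minimal sketch of the two shapes and of the bridge between them; the class and method names are illustrative, and the package layout is assumed from the mapped code:

    import org.apache.ignite.ml.math.Vector;
    import org.apache.ignite.ml.math.VectorUtils;

    public class RowTypeChangeSketch {
        /** Before the migration: a feature row is a raw JDK array. */
        static double firstFeatureOld(double[] row) {
            return row[0];
        }

        /** After the migration: the same row is an Ignite Vector. */
        static double firstFeatureNew(Vector row) {
            return row.get(0);
        }

        public static void main(String[] args) {
            double[] raw = {1.0, 2.0, 3.0};
            System.out.println(firstFeatureOld(raw));
            System.out.println(firstFeatureNew(VectorUtils.of(raw)));   // wrap at the boundary
        }
    }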
Mapping:
Rename Variable
getModel → getMdl
model → mdl
model → mdl
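The renames follow Ignite's abbreviation conventions rather than the type change itself: wherever a trained model travels alongside the migrated rows, model/getModel became mdl/getMdl. A hedged sketch of the pattern on a hypothetical holder class; only the naming is taken from the mappings above:

    /** Hypothetical holder illustrating the rename pattern. */
    public class TrainedHolder<M> {
        private final M mdl;            // formerly "model"

        public TrainedHolder(M mdl) {
            this.mdl = mdl;
        }

        public M getMdl() {             // formerly "getModel()"
            return mdl;
        }
    }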
Add or Remove Method invocation
row.length → row.size()
row.length → row.size()
Double.valueOf(row[i]).equals(Double.NaN) → Double.valueOf(row.get(i)).equals(Double.NaN)
row.length → row.size()
row.length → row.size()
featureRow.length → featureRow.size()
featureRow.length → featureRow.size()
mdl.apply(new DenseLocalOnHeapVector(features)) → mdl.apply(features)
preprocessor.apply(i,data[i]) → preprocessor.apply(i,VectorUtils.of(data[i])).asArray()
datasetBuilder.build((upstream,upstreamSize) -> new EmptyContext(),(upstream,upstreamSize,ctx) -> { double[] sums=null; int[] counts=null; Map<Double,Integer>[] valuesByFreq=null; while (upstream.hasNext()) { UpstreamEntry<K,V> entity=upstream.next(); double[] row=basePreprocessor.apply(entity.getKey(),entity.getValue()); switch (imputingStgy) { case MEAN: sums=calculateTheSums(row,sums); counts=calculateTheCounts(row,counts); break; case MOST_FREQUENT: valuesByFreq=calculateFrequencies(row,valuesByFreq); break; default : throw new UnsupportedOperationException("The chosen strategy is not supported"); } } ImputerPartitionData partData; switch (imputingStgy) { case MEAN: partData=new ImputerPartitionData().withSums(sums).withCounts(counts); break; case MOST_FREQUENT: partData=new ImputerPartitionData().withValuesByFrequency(valuesByFreq); break; default : throw new UnsupportedOperationException("The chosen strategy is not supported"); } return partData; } )
→
datasetBuilder.build((upstream,upstreamSize) -> new EmptyContext(),(upstream,upstreamSize,ctx) -> { double[] sums=null; int[] counts=null; Map<Double,Integer>[] valuesByFreq=null; while (upstream.hasNext()) { UpstreamEntry<K,V> entity=upstream.next(); Vector row=basePreprocessor.apply(entity.getKey(),entity.getValue()); switch (imputingStgy) { case MEAN: sums=calculateTheSums(row,sums); counts=calculateTheCounts(row,counts); break; case MOST_FREQUENT: valuesByFreq=calculateFrequencies(row,valuesByFreq); break; default : throw new UnsupportedOperationException("The chosen strategy is not supported"); } } ImputerPartitionData partData; switch (imputingStgy) { case MEAN: partData=new ImputerPartitionData().withSums(sums).withCounts(counts); break; case MOST_FREQUENT: partData=new ImputerPartitionData().withValuesByFrequency(valuesByFreq); break; default : throw new UnsupportedOperationException("The chosen strategy is not supported"); } return partData; } )
calculateImputingValuesBySumsAndCounts(dataset) → VectorUtils.of(calculateImputingValuesBySumsAndCounts(dataset))
calculateImputingValuesByFrequencies(dataset) → VectorUtils.of(calculateImputingValuesByFrequencies(dataset))
currComposition.apply(VectorUtils.of(featureExtractor.apply(k,v))) → currComposition.apply(featureExtractor.apply(k,v))
res.length → res.size()
imputingValues.length → imputingValues.size()
featureMapping.forEach((localId,featureValueId) -> newFeaturesValues[localId]=featureValues[featureValueId]) → featureMapping.forEach((localId,featureValueId) -> newFeaturesValues[localId]=featureValues.get(featureValueId))
row.length → row.size()
row.length → row.size()
row.length → row.size()
row.length → row.size()
res.length → res.size()
imputingValues.length → imputingValues.size()
mdl.apply(new DenseLocalOnHeapVector(features)) → mdl.apply(features)
res.length → res.size()
res.length → res.size()
apply → asArray
row.length → row.size()
row.length → row.size()
datasetBuilder.build((upstream,upstreamSize) -> new EmptyContext(),(upstream,upstreamSize,ctx) -> { double[] min=null; double[] max=null; while (upstream.hasNext()) { UpstreamEntry<K,V> entity=upstream.next(); double[] row=basePreprocessor.apply(entity.getKey(),entity.getValue()); if (min == null) { min=new double[row.length]; for (int i=0; i < min.length; i++) min[i]=Double.MAX_VALUE; } else assert min.length == row.length : "Base preprocessor must return exactly " + min.length + " features"; if (max == null) { max=new double[row.length]; for (int i=0; i < max.length; i++) max[i]=-Double.MAX_VALUE; } else assert max.length == row.length : "Base preprocessor must return exactly " + min.length + " features"; for (int i=0; i < row.length; i++) { if (row[i] < min[i]) min[i]=row[i]; if (row[i] > max[i]) max[i]=row[i]; } } return new MinMaxScalerPartitionData(min,max); } )
→
datasetBuilder.build((upstream,upstreamSize) -> new EmptyContext(),(upstream,upstreamSize,ctx) -> { double[] min=null; double[] max=null; while (upstream.hasNext()) { UpstreamEntry<K,V> entity=upstream.next(); Vector row=basePreprocessor.apply(entity.getKey(),entity.getValue()); if (min == null) { min=new double[row.size()]; for (int i=0; i < min.length; i++) min[i]=Double.MAX_VALUE; } else assert min.length == row.size() : "Base preprocessor must return exactly " + min.length + " features"; if (max == null) { max=new double[row.size()]; for (int i=0; i < max.length; i++) max[i]=-Double.MAX_VALUE; } else assert max.length == row.size() : "Base preprocessor must return exactly " + min.length + " features"; for (int i=0; i < row.size(); i++) { if (row.get(i) < min[i]) min[i]=row.get(i); if (row.get(i) > max[i]) max[i]=row.get(i); } } return new MinMaxScalerPartitionData(min,max); } )
featureMapping.forEach((localId,featureValueId) -> newFeaturesValues[localId]=featureValues[featureValueId]) → featureMapping.forEach((localId,featureValueId) -> newFeaturesValues[localId]=featureValues.get(featureValueId))
preprocessor.apply(i,data[i]) → preprocessor.apply(i,VectorUtils.of(data[i])).asArray()
preprocessor.apply(5,new double[]{Double.NaN,0,Double.NaN}) → preprocessor.apply(5,VectorUtils.of(Double.NaN,0,Double.NaN)).asArray()
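Most of the entries above are mechanical call-site rewrites: length reads become size(), array indexing becomes get(i), the DenseLocalOnHeapVector wrapping at prediction time disappears because the features already arrive as a Vector, and callers that still hold raw arrays bridge the gap with VectorUtils.of(...) on the way in and asArray() on the way out. A hedged sketch of those adaptations; the class is illustrative and the package layout is assumed:

    import org.apache.ignite.ml.math.Vector;
    import org.apache.ignite.ml.math.VectorUtils;

    public class CallSiteAdaptations {
        /** row.length / row[i] become row.size() / row.get(i). */
        static int countNaNs(Vector row) {
            int cnt = 0;
            for (int i = 0; i < row.size(); i++) {
                if (Double.valueOf(row.get(i)).equals(Double.NaN))
                    cnt++;
            }
            return cnt;
        }

        /** Array-based callers wrap on the way in and unwrap on the way out. */
        static double[] roundTrip(double[] raw) {
            Vector row = VectorUtils.of(raw);   // stands in for preprocessor.apply(i, VectorUtils.of(data[i]))
            return row.asArray();               // ...and .asArray() hands a double[] back to old callers
        }

        public static void main(String[] args) {
            Vector row = VectorUtils.of(Double.NaN, 0, Double.NaN);
            System.out.println(countNaNs(row));                           // 2
            System.out.println(roundTrip(new double[] {1, 2}).length);    // 2
        }
    }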
Wrap or Un-wrap
put → put
put → put
put → put
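In the Wrap or Un-wrap entries the invoked method itself (put) is unchanged; only its argument is wrapped into, or unwrapped from, a Vector so the stored value crosses the double[]/Vector boundary. A hedged sketch with an ordinary HashMap standing in for whatever collection the real code populates:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.ignite.ml.math.Vector;
    import org.apache.ignite.ml.math.VectorUtils;

    public class WrapUnwrapSketch {
        public static void main(String[] args) {
            double[] row = {1.0, 2.0};

            // Before: the collection stored raw arrays.
            Map<Integer, double[]> oldRows = new HashMap<>();
            oldRows.put(0, row);

            // After: the same put call survives, its value wrapped into a Vector.
            Map<Integer, Vector> newRows = new HashMap<>();
            newRows.put(0, VectorUtils.of(row));

            System.out.println(newRows.get(0).size());   // 2
        }
    }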
Cascading Type Change (Different)
IgniteFunction<double[],double[]> → IgniteFunction<Vector,Vector>
IgniteFunction<double[],double[]> → IgniteFunction<Vector,Vector>
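Changing the row type cascades into the functional interfaces that carry rows around: a preprocessor typed IgniteFunction<double[],double[]> becomes IgniteFunction<Vector,Vector>. A minimal sketch, assuming IgniteFunction lives in org.apache.ignite.ml.math.functions; the transformation body is illustrative:

    import org.apache.ignite.ml.math.Vector;
    import org.apache.ignite.ml.math.VectorUtils;
    import org.apache.ignite.ml.math.functions.IgniteFunction;

    public class CascadingSignatureSketch {
        public static void main(String[] args) {
            // Before: IgniteFunction<double[], double[]> halve = r -> { ... };
            // After: the signature follows the new row type.
            IgniteFunction<Vector, Vector> halve = r -> {
                double[] res = new double[r.size()];
                for (int i = 0; i < r.size(); i++)
                    res[i] = r.get(i) / 2.0;
                return VectorUtils.of(res);
            };

            System.out.println(halve.apply(VectorUtils.of(2.0, 4.0)).get(1));   // 2.0
        }
    }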
Other
res → res
row → row
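The Other entries record declarations where the variable keeps its name (res, row) and only its declared type changes from double[] to Vector, with the reads adapted as in the method-invocation entries above. A short hedged sketch:

    import org.apache.ignite.ml.math.Vector;
    import org.apache.ignite.ml.math.VectorUtils;

    public class DeclarationChangeSketch {
        public static void main(String[] args) {
            // Before: double[] row = basePreprocessor.apply(key, value);
            // After: same name, new declared type (the preprocessor call is stubbed here).
            Vector row = VectorUtils.of(5.0, 7.0);

            System.out.println(row.size());   // was row.length
            System.out.println(row.get(1));   // was row[1]
        }
    }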