[hotfix] Minutiae

greghogan committed May 12, 2017
1 parent c9ffa9d commit 423f4d6
Showing 20 changed files with 107 additions and 99 deletions.
@@ -860,7 +860,7 @@ protected int executeProgram(PackagedProgram program, ClusterClient client, int
Map<String, Object> accumulatorsResult = execResult.getAllAccumulatorResults();
if (accumulatorsResult.size() > 0) {
System.out.println("Accumulator Results: ");
-System.out.println(AccumulatorHelper.getResultsFormated(accumulatorsResult));
+System.out.println(AccumulatorHelper.getResultsFormatted(accumulatorsResult));
}
} else {
logAndSysout("Job has been submitted with JobID " + result.getJobID());
@@ -18,14 +18,6 @@

package org.apache.flink.api.io.avro;

-import static org.junit.Assert.*;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.IOException;
-import java.util.*;

import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
@@ -59,6 +51,22 @@
import org.junit.Before;
import org.junit.Test;

+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;

/**
* Test the avro input format.
* (The testcase is mostly the getting started tutorial of avro)
Expand Down Expand Up @@ -273,7 +281,7 @@ public void testDeserializeToGenericType() throws IOException {
DatumReader<GenericData.Record> datumReader = new GenericDatumReader<>(userSchema);

try (FileReader<GenericData.Record> dataFileReader = DataFileReader.openReader(testFile, datumReader)) {
-// initialize Record by reading it from disk (thats easier than creating it by hand)
+// initialize Record by reading it from disk (that's easier than creating it by hand)
GenericData.Record rec = new GenericData.Record(userSchema);
dataFileReader.next(rec);

@@ -38,7 +38,7 @@
/**
* A {@link Writer} that writes the bucket files as Hadoop {@link SequenceFile SequenceFiles}.
* The input to the {@link BucketingSink} must
-be a {@link org.apache.flink.api.java.tuple.Tuple2} of two Hadopo
+be a {@link org.apache.flink.api.java.tuple.Tuple2} of two Hadoop
* {@link org.apache.hadoop.io.Writable Writables}.
*
* @param <K> The type of the first tuple field.
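For context, a minimal sketch of how this writer is typically attached to a stream. This is an illustrative example, not code from this commit: the base path is hypothetical, and it assumes the flink-connector-filesystem and flink-hadoop-compatibility dependencies are on the classpath.

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.fs.SequenceFileWriter;
import org.apache.flink.streaming.connectors.fs.bucketing.BucketingSink;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

public class SequenceFileSinkExample {
	public static void main(String[] args) throws Exception {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		// The input must be a Tuple2 of two Hadoop Writables, as the javadoc says.
		DataStream<Tuple2<IntWritable, Text>> pairs = env.fromElements(
				Tuple2.of(new IntWritable(1), new Text("one")),
				Tuple2.of(new IntWritable(2), new Text("two")));

		BucketingSink<Tuple2<IntWritable, Text>> sink =
				new BucketingSink<>("hdfs:///tmp/sequence-files"); // hypothetical base path
		sink.setWriter(new SequenceFileWriter<IntWritable, Text>());

		pairs.addSink(sink);
		env.execute("SequenceFile sink sketch");
	}
}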
@@ -112,7 +112,7 @@ public static Map<String, Object> toResultMap(Map<String, Accumulator<?, ?>> acc
return resultMap;
}

-public static String getResultsFormated(Map<String, Object> map) {
+public static String getResultsFormatted(Map<String, Object> map) {
StringBuilder builder = new StringBuilder();
for (Map.Entry<String, Object> entry : map.entrySet()) {
builder
@@ -394,7 +394,7 @@ public static <IN, OUT> TypeInformation<OUT> getUnaryOperatorReturnType(
* @param inputTypeArgumentIndex Index of the type argument of function's first parameter
* specifying the input type if it is wrapped (Iterable, Map,
* etc.). Otherwise -1.
-* @param outputTypeArgumentIndex Index of the type argument of functions second parameter
+* @param outputTypeArgumentIndex Index of the type argument of function's second parameter
* specifying the output type if it is wrapped in a Collector.
* Otherwise -1.
* @param inType Type of the input elements (In case of an iterable, it is the element type)
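To illustrate the two index parameters this javadoc describes (an example for the reader, not part of this commit): GroupReduceFunction wraps its input in an Iterable and its output in a Collector, so both indices are 0; for a MapFunction, where neither side is wrapped, both would be -1.

import org.apache.flink.api.common.functions.GroupReduceFunction;
import org.apache.flink.util.Collector;

// The SAM method is reduce(Iterable<IN> values, Collector<OUT> out):
// IN is type argument 0 of the first parameter (Iterable<IN>)   -> inputTypeArgumentIndex = 0
// OUT is type argument 0 of the second parameter (Collector<OUT>) -> outputTypeArgumentIndex = 0
public class ConcatReducer implements GroupReduceFunction<String, String> {
	@Override
	public void reduce(Iterable<String> values, Collector<String> out) {
		StringBuilder sb = new StringBuilder();
		for (String value : values) {
			sb.append(value);
		}
		out.collect(sb.toString());
	}
}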
@@ -18,12 +18,6 @@

package org.apache.flink.api.java.typeutils.runtime;

-import java.lang.reflect.Array;
-import java.util.ArrayList;
-import java.util.Random;
-
-import org.junit.Assert;
-import org.junit.Test;
import org.apache.flink.api.common.typeutils.SerializerTestInstance;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.base.GenericArraySerializer;
@@ -34,6 +28,12 @@
import org.apache.flink.api.java.typeutils.runtime.AbstractGenericTypeSerializerTest.ComplexNestedObject2;
import org.apache.flink.api.java.typeutils.runtime.AbstractGenericTypeSerializerTest.SimpleTypes;
import org.apache.flink.util.StringUtils;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.lang.reflect.Array;
+import java.util.ArrayList;
+import java.util.Random;

public abstract class AbstractGenericArraySerializerTest {

@@ -110,11 +110,11 @@ public void testNestedObjects() {
@Test
public void testBeanStyleObjects() {
{
-Book b1 = new Book(976243875L, "The Serialization Odysse", 42);
+Book b1 = new Book(976243875L, "The Serialization Odyssey", 42);
Book b2 = new Book(0L, "Debugging byte streams", 1337);
Book b3 = new Book(-1L, "Low level interfaces", 0xC0FFEE);
Book b4 = new Book(Long.MAX_VALUE, "The joy of bits and bytes", 0xDEADBEEF);
-Book b5 = new Book(Long.MIN_VALUE, "Winnign a prize for creative test strings", 0xBADF00);
+Book b5 = new Book(Long.MIN_VALUE, "Winning a prize for creative test strings", 0xBADF00);
Book b6 = new Book(-2L, "Distributed Systems", 0xABCDEF0123456789L);

runTests( new Book[] {b1, b2},
@@ -30,16 +30,14 @@ abstract public class AbstractGenericTypeComparatorTest {

@Test
public void testString() {
-runTests(new String[]{
-"",
+runTests("",
"Lorem Ipsum Dolor Omit Longer",
"aaaa",
"abcd",
"abce",
"abdd",
"accd",
"bbcd"
});
"bbcd");
}

@Test
@@ -88,7 +88,7 @@ public void testNestedObjects() {
@Test
public void testBeanStyleObjects() {
{
-Book b1 = new Book(976243875L, "The Serialization Odysse", 42);
+Book b1 = new Book(976243875L, "The Serialization Odyssey", 42);
Book b2 = new Book(0L, "Debugging byte streams", 1337);
Book b3 = new Book(-1L, "Low level interfaces", 0xC0FFEE);

@@ -46,7 +46,7 @@ public static class FromGeneric2 {}

public static class Nested1 {
private FromNested fromNested;
-private Path yodaIntervall;
+private Path yodaInterval;
}

public static class ClassWithNested {
@@ -11,22 +11,23 @@
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WNTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.flink.types;

-import java.math.BigDecimal;
-import java.math.BigInteger;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.util.TestLogger;
import org.junit.Test;

+import java.math.BigDecimal;
+import java.math.BigInteger;
import java.util.Date;

-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;

public class BasicTypeInfoTest extends TestLogger {

@@ -29,7 +29,7 @@
* Grouping is an intermediate step for a transformation on a grouped DataSet.<br>
* The following transformation can be applied on Grouping:
* <ul>
-* <li>{@link UnsortedGrouping#reduce(org.apache.flink.api.common.functions.ReduceFunction)},</li>
+* <li>{@link UnsortedGrouping#reduce(org.apache.flink.api.common.functions.ReduceFunction)},</li>
* <li>{@link UnsortedGrouping#reduceGroup(org.apache.flink.api.common.functions.GroupReduceFunction)}, and</li>
* <li>{@link UnsortedGrouping#aggregate(org.apache.flink.api.java.aggregation.Aggregations, int)}.</li>
* </ul>
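As a quick illustration of the grouped transformations this javadoc lists (a sketch with made-up values, not code from this commit):

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.aggregation.Aggregations;
import org.apache.flink.api.java.tuple.Tuple2;

public class GroupingExample {
	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		DataSet<Tuple2<String, Integer>> words = env.fromElements(
				Tuple2.of("a", 1), Tuple2.of("b", 2), Tuple2.of("a", 3));

		// groupBy(0) yields an UnsortedGrouping; aggregate is one of the
		// transformations listed above. Prints ("a", 4) and ("b", 2).
		words.groupBy(0)
				.aggregate(Aggregations.SUM, 1)
				.print();
	}
}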
@@ -49,7 +49,7 @@ public void testMaxByComparison() {
Assert.assertSame("SelectByMax must return bigger tuple", bigger, maxByTuple.reduce(smaller, bigger));
Assert.assertSame("SelectByMax must return bigger tuple", bigger, maxByTuple.reduce(bigger, smaller));
} catch (Exception e) {
Assert.fail("No exception should be thrown while comapring both tuples");
Assert.fail("No exception should be thrown while comparing both tuples");
}
}

@@ -67,7 +67,7 @@ public void testMaxByComparisonSpecialCase1() {
Assert.assertSame("SelectByMax must return the first given tuple", specialCaseBigger, maxByTuple.reduce(specialCaseBigger, bigger));
Assert.assertSame("SelectByMax must return the first given tuple", bigger, maxByTuple.reduce(bigger, specialCaseBigger));
} catch (Exception e) {
Assert.fail("No exception should be thrown while comapring both tuples");
Assert.fail("No exception should be thrown while comparing both tuples");
}
}

@@ -82,7 +82,7 @@ public void testMaxByComparisonSpecialCase2() {
Assert.assertSame("SelectByMax must return bigger tuple", bigger, maxByTuple.reduce(specialCaseBigger, bigger));
Assert.assertSame("SelectByMax must return bigger tuple", bigger, maxByTuple.reduce(bigger, specialCaseBigger));
} catch (Exception e) {
Assert.fail("No exception should be thrown while comapring both tuples");
Assert.fail("No exception should be thrown while comparing both tuples");
}
}

@@ -97,7 +97,7 @@ public void testMaxByComparisonMultiple() {
Assert.assertSame("SelectByMax must return bigger tuple", bigger, maxByTuple.reduce(smaller, bigger));
Assert.assertSame("SelectByMax must return bigger tuple", bigger, maxByTuple.reduce(bigger, smaller));
} catch (Exception e) {
Assert.fail("No exception should be thrown while comapring both tuples");
Assert.fail("No exception should be thrown while comparing both tuples");
}
}

@@ -112,7 +112,7 @@ public void testMaxByComparisonMustReturnATuple() {
Assert.assertSame("SelectByMax must return bigger tuple", bigger, maxByTuple.reduce(bigger, bigger));
Assert.assertSame("SelectByMax must return smaller tuple", smaller, maxByTuple.reduce(smaller, smaller));
} catch (Exception e) {
Assert.fail("No exception should be thrown while comapring both tuples");
Assert.fail("No exception should be thrown while comparing both tuples");
}
}

@@ -129,7 +129,7 @@ public void testMinByComparison() {
Assert.assertSame("SelectByMin must return smaller tuple", smaller, minByTuple.reduce(smaller, bigger));
Assert.assertSame("SelectByMin must return smaller tuple", smaller, minByTuple.reduce(bigger, smaller));
} catch (Exception e) {
Assert.fail("No exception should be thrown while comapring both tuples");
Assert.fail("No exception should be thrown while comparing both tuples");
}
}

@@ -145,7 +145,7 @@ public void testMinByComparisonSpecialCase1() {
Assert.assertSame("SelectByMin must return the first given tuple", specialCaseBigger, minByTuple.reduce(specialCaseBigger, bigger));
Assert.assertSame("SelectByMin must return the first given tuple", bigger, minByTuple.reduce(bigger, specialCaseBigger));
} catch (Exception e) {
Assert.fail("No exception should be thrown while comapring both tuples");
Assert.fail("No exception should be thrown while comparing both tuples");
}
}

@@ -161,7 +161,7 @@ public void testMinByComparisonSpecialCase2() {
Assert.assertSame("SelectByMin must return smaller tuple", smaller, minByTuple.reduce(specialCaseSmaller, smaller));
Assert.assertSame("SelectByMin must return smaller tuple", smaller, minByTuple.reduce(smaller, specialCaseSmaller));
} catch (Exception e) {
Assert.fail("No exception should be thrown while comapring both tuples");
Assert.fail("No exception should be thrown while comparing both tuples");
}
}

@@ -176,7 +176,7 @@ public void testMinByComparisonMultiple() {
Assert.assertSame("SelectByMin must return smaller tuple", smaller, minByTuple.reduce(smaller, bigger));
Assert.assertSame("SelectByMin must return smaller tuple", smaller, minByTuple.reduce(bigger, smaller));
} catch (Exception e) {
Assert.fail("No exception should be thrown while comapring both tuples");
Assert.fail("No exception should be thrown while comparing both tuples");
}
}

@@ -324,7 +324,7 @@ public void testAddSourceFieldOffsets() {
// --------------------------------------------------------------------------------------------

@Test
-public void testForwardedNoArrrowIndividualStrings() {
+public void testForwardedNoArrowIndividualStrings() {
String[] forwardedFields = {"f2","f3","f0"};
SingleInputSemanticProperties sp = new SingleInputSemanticProperties();
SemanticPropUtil.getSemanticPropsSingleFromString(sp, forwardedFields, null, null, fiveIntTupleType, fiveIntTupleType);
@@ -335,7 +335,7 @@ public void testForwardedNoArrrowIndividualStrings() {
}

@Test
-public void testForwardedNoArrrowOneString() {
+public void testForwardedNoArrowOneString() {
String[] forwardedFields = {"f2;f3;f0"};
SingleInputSemanticProperties sp = new SingleInputSemanticProperties();
SemanticPropUtil.getSemanticPropsSingleFromString(sp, forwardedFields, null, null, fiveIntTupleType, fiveIntTupleType);
@@ -360,7 +360,7 @@ public void testForwardedNoArrrowOneString() {
}

@Test
-public void testForwardedNoArrrowSpaces() {
+public void testForwardedNoArrowSpaces() {
String[] forwardedFields = {" f2 ; f3 ; f0 "};
SingleInputSemanticProperties sp = new SingleInputSemanticProperties();
SemanticPropUtil.getSemanticPropsSingleFromString(sp, forwardedFields, null, null, fiveIntTupleType, fiveIntTupleType);
@@ -1219,11 +1219,11 @@ public void testForwardedDual() {

@Test
public void testNonForwardedDual() {
-String[] nonNorwardedFieldsFirst = { "f1;f2" };
-String[] nonNorwardedFieldsSecond = { "f0" };
+String[] nonForwardedFieldsFirst = { "f1;f2" };
+String[] nonForwardedFieldsSecond = { "f0" };
DualInputSemanticProperties dsp = new DualInputSemanticProperties();
SemanticPropUtil.getSemanticPropsDualFromString(dsp, null, null,
-nonNorwardedFieldsFirst, nonNorwardedFieldsSecond, null, null, threeIntTupleType, threeIntTupleType, threeIntTupleType);
+nonForwardedFieldsFirst, nonForwardedFieldsSecond, null, null, threeIntTupleType, threeIntTupleType, threeIntTupleType);

assertTrue(dsp.getForwardingTargetFields(0, 0).contains(0));
assertTrue(dsp.getForwardingTargetFields(0, 1).size() == 0);
Expand All @@ -1232,11 +1232,11 @@ public void testNonForwardedDual() {
assertTrue(dsp.getForwardingTargetFields(1, 1).contains(1));
assertTrue(dsp.getForwardingTargetFields(1, 2).contains(2));

-nonNorwardedFieldsFirst[0] = "f1";
-nonNorwardedFieldsSecond[0] = "";
+nonForwardedFieldsFirst[0] = "f1";
+nonForwardedFieldsSecond[0] = "";
dsp = new DualInputSemanticProperties();
SemanticPropUtil.getSemanticPropsDualFromString(dsp, null, null,
-nonNorwardedFieldsFirst, null, null, null, threeIntTupleType, fiveIntTupleType, threeIntTupleType);
+nonForwardedFieldsFirst, null, null, null, threeIntTupleType, fiveIntTupleType, threeIntTupleType);

assertTrue(dsp.getForwardingTargetFields(0, 0).contains(0));
assertTrue(dsp.getForwardingTargetFields(0, 1).size() == 0);
Expand All @@ -1245,11 +1245,11 @@ public void testNonForwardedDual() {
assertTrue(dsp.getForwardingTargetFields(1, 1).size() == 0);
assertTrue(dsp.getForwardingTargetFields(1, 2).size() == 0);

-nonNorwardedFieldsFirst[0] = "";
-nonNorwardedFieldsSecond[0] = "f2;f0";
+nonForwardedFieldsFirst[0] = "";
+nonForwardedFieldsSecond[0] = "f2;f0";
dsp = new DualInputSemanticProperties();
SemanticPropUtil.getSemanticPropsDualFromString(dsp, null, null,
-null, nonNorwardedFieldsSecond, null, null, fiveIntTupleType, threeIntTupleType, threeIntTupleType);
+null, nonForwardedFieldsSecond, null, null, fiveIntTupleType, threeIntTupleType, threeIntTupleType);

assertTrue(dsp.getForwardingTargetFields(0, 0).size() == 0);
assertTrue(dsp.getForwardingTargetFields(0, 1).size() == 0);
@@ -1283,19 +1283,19 @@ public void testNonForwardedDual() {
@Test(expected = InvalidSemanticAnnotationException.class)
public void testNonForwardedDualInvalidTypes1() {

-String[] nonNorwardedFieldsFirst = { "f1" };
+String[] nonForwardedFieldsFirst = { "f1" };
DualInputSemanticProperties dsp = new DualInputSemanticProperties();
SemanticPropUtil.getSemanticPropsDualFromString(dsp, null, null,
-nonNorwardedFieldsFirst, null, null, null, fiveIntTupleType, threeIntTupleType, threeIntTupleType);
+nonForwardedFieldsFirst, null, null, null, fiveIntTupleType, threeIntTupleType, threeIntTupleType);
}

@Test(expected = InvalidSemanticAnnotationException.class)
public void testNonForwardedDualInvalidTypes2() {

-String[] nonNorwardedFieldsSecond = { "f1" };
+String[] nonForwardedFieldsSecond = { "f1" };
DualInputSemanticProperties dsp = new DualInputSemanticProperties();
SemanticPropUtil.getSemanticPropsDualFromString(dsp, null, null,
-null, nonNorwardedFieldsSecond, null, null, threeIntTupleType, pojoInTupleType, threeIntTupleType);
+null, nonForwardedFieldsSecond, null, null, threeIntTupleType, pojoInTupleType, threeIntTupleType);
}

@Test
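The strings these tests parse are the same semicolon-separated forwarded-fields expressions users attach via annotations. A hedged illustration (the function and fields are made up, not from this commit):

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFields;
import org.apache.flink.api.java.tuple.Tuple3;

// Declares that f0 and f2 pass through unchanged, using the "f0;f2" syntax
// that SemanticPropUtil parses in the tests above.
@ForwardedFields("f0;f2")
public class ScaleMiddleField implements
		MapFunction<Tuple3<Integer, Integer, Integer>, Tuple3<Integer, Integer, Integer>> {
	@Override
	public Tuple3<Integer, Integer, Integer> map(Tuple3<Integer, Integer, Integer> t) {
		return Tuple3.of(t.f0, t.f1 * 2, t.f2);
	}
}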
@@ -76,8 +76,8 @@
/**
* This traversal creates the optimizer DAG from a program.
* It works as a visitor that walks the program's flow in a depth-first fashion, starting from the data sinks.
-* During the descend, it creates an optimizer node for each operator, respectively data source or -sink.
-* During the ascend, it connects the nodes to the full graph.
+* During the descent it creates an optimizer node for each operator, respectively data source or sink.
+* During the ascent it connects the nodes to the full graph.
*/
public class GraphCreatingVisitor implements Visitor<Operator<?>> {

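A minimal sketch of the depth-first pre/post-visit pattern this javadoc describes, using a hypothetical Node type rather than Flink's optimizer classes:

import java.util.ArrayList;
import java.util.List;

class Node {
	final String name;
	final List<Node> inputs = new ArrayList<>();

	Node(String name) {
		this.name = name;
	}
}

class DepthFirstWalker {
	// Walks from a sink toward the sources, depth-first.
	// (A real DAG traversal would also track visited nodes.)
	void walk(Node node) {
		preVisit(node); // descent: e.g. create an optimizer node for the operator
		for (Node input : node.inputs) {
			walk(input);
		}
		postVisit(node); // ascent: e.g. connect the node into the full graph
	}

	void preVisit(Node node) {
		System.out.println("creating node for " + node.name);
	}

	void postVisit(Node node) {
		System.out.println("connecting " + node.name);
	}
}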