

Java Function.call Method Code Examples

This article collects typical usage examples of the Function.call method from org.apache.spark.api.java.function.Function in Java. If you are wondering how the Java Function.call method works, how to use it, or what real usage looks like, the curated code examples below may help. You can also explore further usage examples of the enclosing class, org.apache.spark.api.java.function.Function.


The following presents 15 code examples of the Function.call method, sorted by popularity by default.
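Before the examples, here is a minimal, self-contained sketch (the class name and values are illustrative, not drawn from the projects listed below) of the basic pattern the tests rely on: a Function can be instantiated and its call method invoked directly, without a SparkContext, which is how the test code below exercises the functions under test.

import org.apache.spark.api.java.function.Function;

public class FunctionCallSketch {

    public static void main(String[] args) throws Exception {
        // A simple Function that upper-cases its input string.
        Function<String, String> toUpper = new Function<String, String>() {
            @Override
            public String call(String value) {
                return value.toUpperCase();
            }
        };

        // call() can be invoked directly, without a SparkContext;
        // this is what the test methods below do with the functions they receive.
        String result = toUpper.call("spark");
        System.out.println(result); // prints "SPARK"
    }
}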

Example 1: assertExtractDocIdFunction

import org.apache.spark.api.java.function.Function; // import the package/class this method depends on
private void assertExtractDocIdFunction(Function<Citation,String> function) throws Exception {
    
    CitationEntry citationEntry = CitationEntry.newBuilder()
            .setDestinationDocumentId("DEST_ID")
            .setPosition(2)
            .setExternalDestinationDocumentIds(Maps.newHashMap())
            .build();
    Citation citation = Citation.newBuilder()
            .setSourceDocumentId("SOURCE_ID")
            .setEntry(citationEntry)
            .build();
    
    
    String docId = function.call(citation);
    
    
    assertEquals("SOURCE_ID", docId);
}
 
Developer ID: openaire, Project: iis, Lines: 19, Source: CitationMatchingDirectCounterReporterTest.java

Example 2: assertFinalMap

import org.apache.spark.api.java.function.Function; // import the package/class this method depends on
private void assertFinalMap(Function<Tuple2<CharSequence, Tuple2<Object,CharSequence>>,String> function) throws Exception {
    String entityText = "entity text";
    String joinedId = "some id";
    
    Tuple2<CharSequence, Tuple2<Object,CharSequence>> joinedById = new Tuple2<CharSequence, Tuple2<Object,CharSequence>>(
            joinedId, new Tuple2<Object,CharSequence>(null, entityText));
    
    String outputText = function.call(joinedById);
    
    assertEquals(entityText, outputText);
}
 
Developer ID: openaire, Project: iis, Lines: 12, Source: DocumentFilterTest.java

Example 3: assertMapProjectOrganizationFunction

import org.apache.spark.api.java.function.Function; // import the package/class this method depends on
private void assertMapProjectOrganizationFunction(Function<ProjectToOrganization, AffMatchProjectOrganization> function)
        throws Exception {
    // given
    ProjectToOrganization projectOrganization = mock(ProjectToOrganization.class);
    AffMatchProjectOrganization mappedProjectOrganization = mock(AffMatchProjectOrganization.class);
    when(projectOrganizationConverter.convert(projectOrganization)).thenReturn(mappedProjectOrganization);
    // execute
    AffMatchProjectOrganization retProjectOrganization = function.call(projectOrganization);
    // assert
    assertTrue(retProjectOrganization == mappedProjectOrganization);
}
 
Developer ID: openaire, Project: iis, Lines: 12, Source: IisProjectOrganizationReaderTest.java

Example 4: assertKeyByDocumentProjectFunction

import org.apache.spark.api.java.function.Function; // import the package/class this method depends on
private void assertKeyByDocumentProjectFunction(Function<AffMatchDocumentProject, String> function)
        throws Exception {
    // given
    AffMatchDocumentProject docProj = new AffMatchDocumentProject(documentId, projectId, confidenceLevel);
    // execute
    String projId = function.call(docProj);
    // assert
    assertThat(projId, equalTo(projectId));
}
 
Developer ID: openaire, Project: iis, Lines: 10, Source: DocumentOrganizationCombinerTest.java

Example 5: assertKeyByProjectOrganizationFunction

import org.apache.spark.api.java.function.Function; // import the package/class this method depends on
private void assertKeyByProjectOrganizationFunction(
        Function<AffMatchProjectOrganization, String> function) throws Exception {
    // given
    AffMatchProjectOrganization projOrg = new AffMatchProjectOrganization(projectId, organizationId);
    // execute
    String projId = function.call(projOrg);
    // assert
    assertThat(projId, equalTo(projectId));
}
 
Developer ID: openaire, Project: iis, Lines: 10, Source: DocumentOrganizationCombinerTest.java

Example 6: assertMapDocumentOrganizationFunction

import org.apache.spark.api.java.function.Function; // import the package/class this method depends on
private void assertMapDocumentOrganizationFunction(
        Function<Tuple2<String, Tuple2<AffMatchDocumentProject, AffMatchProjectOrganization>>, AffMatchDocumentOrganization> function)
                throws Exception {
    // execute
    AffMatchDocumentOrganization result = function
            .call(new Tuple2<String, Tuple2<AffMatchDocumentProject, AffMatchProjectOrganization>>(projectId,
                    new Tuple2<AffMatchDocumentProject, AffMatchProjectOrganization>(
                            new AffMatchDocumentProject(documentId, projectId, confidenceLevel),
                            new AffMatchProjectOrganization(projectId, organizationId))));
    // assert
    assertThat(result.getDocumentId(), equalTo(documentId));
    assertThat(result.getOrganizationId(), equalTo(organizationId));
}
 
Developer ID: openaire, Project: iis, Lines: 14, Source: DocumentOrganizationCombinerTest.java

Example 7: assertExtractDocOrgIdFunction

import org.apache.spark.api.java.function.Function; // import the package/class this method depends on
private void assertExtractDocOrgIdFunction(Function<MatchedOrganization, Tuple2<CharSequence, CharSequence>> function) throws Exception {
    
    // given
    MatchedOrganization matchedOrg = new MatchedOrganization("DOC_ID", "ORG_ID", 0.6f);
    
    // execute
    Tuple2<CharSequence, CharSequence> extractedDocOrgId = function.call(matchedOrg);
    
    // assert
    assertEquals("DOC_ID", extractedDocOrgId._1);
    assertEquals("ORG_ID", extractedDocOrgId._2);
}
 
Developer ID: openaire, Project: iis, Lines: 13, Source: IisAffMatchResultWriterTest.java

Example 8: assertExtractDocIdFunction

import org.apache.spark.api.java.function.Function; // import the package/class this method depends on
private void assertExtractDocIdFunction(Function<Tuple3<String, String, Float>,String> function) throws Exception {
    Tuple3<String, String, Float> tuple3 = new Tuple3<>("SOURCE_ID", "DEST_ID", 0.7f);
    String docId = function.call(tuple3);
    assertEquals("SOURCE_ID", docId);
}
 
Developer ID: openaire, Project: iis, Lines: 6, Source: SoftwareExportCounterReporterTest.java

Example 9: joinWithWorkersAndMap

import org.apache.spark.api.java.function.Function; // import the package/class this method depends on
/**
 * A generic function for handling a blockified list of objects to their corresponding compute nodes
 *
 * If Spark is enabled:
 *
 *      Joins an instance of {@code List<Tuple2<LinearlySpacedIndexBlock, V>>} with {@link #computeRDD}, calls the provided
 *      map {@code mapper} on the RDD, and the reference to the old RDD will be replaced with the new RDD.
 *
 * If Spark is disabled:
 *
 *      Only a single target-space block is assumed, such that {@code data} is a singleton. The map function
 *      {@code mapper} will be called on the value contained in {@code data} and {@link #localComputeBlock}, and
 *      the old instance of {@link CoverageModelEMComputeBlock} is replaced with the new instance returned
 *      by {@code mapper}.
 *
 * @param data the list to be joined and mapped together with the compute block(s)
 * @param mapper a mapper binary function that takes a compute block together with an object of type {@code V} and
 *               returns a new compute block
 * @param <V> the type of the object to be broadcast
 */
@UpdatesRDD
private <V> void joinWithWorkersAndMap(@Nonnull final List<Tuple2<LinearlySpacedIndexBlock, V>> data,
                                       @Nonnull final Function<Tuple2<CoverageModelEMComputeBlock, V>, CoverageModelEMComputeBlock> mapper) {
    if (sparkContextIsAvailable) {
        final JavaPairRDD<LinearlySpacedIndexBlock, V> newRDD =
                ctx.parallelizePairs(data, numTargetBlocks).partitionBy(new HashPartitioner(numTargetBlocks));
        computeRDD = computeRDD.join(newRDD).mapValues(mapper);
    } else {
        try {
            Utils.validateArg(data.size() == 1, "Only a single data block is expected in the local mode");
            localComputeBlock = mapper.call(new Tuple2<>(localComputeBlock, data.get(0)._2));
        } catch (Exception e) {
            throw new RuntimeException("Can not apply the map function to the local compute block: " + e.getMessage());
        }
    }
}
 
Developer ID: broadinstitute, Project: gatk-protected, Lines: 37, Source: CoverageModelEMWorkspace.java
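For context, here is a minimal, self-contained sketch of the join-then-mapValues pattern used in the Spark branch above. The types, keys, and values are illustrative placeholders (not the GATK classes), and the local[*] master is an assumption for running the sketch standalone.

import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;

import scala.Tuple2;

public class JoinAndMapValuesSketch {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("join-map-values").setMaster("local[*]");
        try (JavaSparkContext ctx = new JavaSparkContext(conf)) {
            // Existing state keyed by block index (stands in for computeRDD).
            JavaPairRDD<Integer, String> state = ctx.parallelizePairs(Arrays.asList(
                    new Tuple2<>(0, "block-0"), new Tuple2<>(1, "block-1")));

            // New data to join in, keyed the same way (stands in for the blockified list).
            JavaPairRDD<Integer, Integer> updates = ctx.parallelizePairs(Arrays.asList(
                    new Tuple2<>(0, 10), new Tuple2<>(1, 20)));

            // The mapper receives a (state, update) pair and returns the new state,
            // mirroring the Function<Tuple2<...>, ...> signature used above.
            Function<Tuple2<String, Integer>, String> mapper =
                    pair -> pair._1 + "+" + pair._2;

            JavaPairRDD<Integer, String> newState = state.join(updates).mapValues(mapper);
            newState.collect().forEach(t -> System.out.println(t._1 + " -> " + t._2));
        }
    }
}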

Example 10: assertMapToMatchResultFunction

import org.apache.spark.api.java.function.Function; // import the package/class this method depends on
private void assertMapToMatchResultFunction(Function<Tuple2<AffMatchAffiliation, AffMatchOrganization>, AffMatchResult> function) throws Exception {
    
    // given
    
    AffMatchAffiliation affiliation = mock(AffMatchAffiliation.class);
    AffMatchOrganization organization = mock(AffMatchOrganization.class);
    
    
    // execute
    
    AffMatchResult affMatchResult = function.call(new Tuple2<>(affiliation, organization));
    
    
    // assert
    
    assertTrue(affiliation == affMatchResult.getAffiliation());
    assertTrue(organization == affMatchResult.getOrganization());
    
    
}
 
Developer ID: openaire, Project: iis, Lines: 21, Source: AffOrgMatchComputerTest.java

Example 11: assertRecalcMatchStrengthFunction

import org.apache.spark.api.java.function.Function; // import the package/class this method depends on
private void assertRecalcMatchStrengthFunction(Function<AffMatchResult, AffMatchResult> function, AffOrgMatchVoter voter) throws Exception {
    
    // given
    
    AffMatchResult affMatchResult = mock(AffMatchResult.class);
    AffMatchResult expectedRecalcAffMatchResult = mock(AffMatchResult.class);
    
    doReturn(expectedRecalcAffMatchResult).when(affOrgMatchStrengthRecalculator).recalculateMatchStrength(affMatchResult, voter);
    
    
    // execute
    
    AffMatchResult recalcAffMatchResult = function.call(affMatchResult);
    
    
    // assert
    
    assertTrue(expectedRecalcAffMatchResult == recalcAffMatchResult);
    
}
 
Developer ID: openaire, Project: iis, Lines: 21, Source: AffOrgMatchComputerTest.java

Example 12: assertConvertDocumentProjectFunction

import org.apache.spark.api.java.function.Function; // import the package/class this method depends on
private void assertConvertDocumentProjectFunction(Function<DocumentToProject, AffMatchDocumentProject> function) throws Exception {
    
    // given
    
    DocumentToProject inputDocProj = mock(DocumentToProject.class);
    AffMatchDocumentProject outputDocProj = mock(AffMatchDocumentProject.class);
    
    when(documentProjectConverter.convert(inputDocProj)).thenReturn(outputDocProj);
    
    // execute
    
    AffMatchDocumentProject retDocProj = function.call(inputDocProj);
    
    // assert
    
    assertTrue(retDocProj == outputDocProj);
}
 
Developer ID: openaire, Project: iis, Lines: 18, Source: IisDocumentProjectReaderTest.java

Example 13: assertConvertFunction

import org.apache.spark.api.java.function.Function; // import the package/class this method depends on
private void assertConvertFunction(Function<AffMatchResult, MatchedOrganization> function) throws Exception {

    // given
    
    AffMatchResult affMatchResult = mock(AffMatchResult.class);
    MatchedOrganization matchedAff = mock(MatchedOrganization.class);
    
    when(affMatchResultConverter.convert(affMatchResult)).thenReturn(matchedAff);

    
    // execute
    
    MatchedOrganization retMatchedAff = function.call(affMatchResult);

    
    // assert
    
    assertNotNull(retMatchedAff);
    assertTrue(matchedAff == retMatchedAff);
    
}
 
Developer ID: openaire, Project: iis, Lines: 22, Source: IisAffMatchResultWriterTest.java

Example 14: assertConvertFunction

import org.apache.spark.api.java.function.Function; // import the package/class this method depends on
private void assertConvertFunction(Function<Organization, AffMatchOrganization> function) throws Exception {

    // given
    
    Organization org = new Organization();
    org.setId("ORG1");
    
    AffMatchOrganization affMatchOrg = new AffMatchOrganization("ORG1");
    
    when(organizationConverter.convert(org)).thenReturn(affMatchOrg);

    
    // execute
    
    AffMatchOrganization retAffMatchOrg = function.call(org);

    
    // assert
    
    assertTrue(retAffMatchOrg == affMatchOrg);
    
}
 
Developer ID: openaire, Project: iis, Lines: 23, Source: IisOrganizationReaderTest.java

Example 15: mapWorkers

import org.apache.spark.api.java.function.Function; // import the package/class this method depends on
/**
 * Calls a map function on the compute block(s) and returns the new compute block(s)
 *
 * If Spark is enabled:
 *
 *      The map is applied on the values of {@link #computeRDD}, the reference to the old RDD will be replaced
 *      by the new RDD, and original partitioning is retained
 *
 * If Spark is disabled:
 *
 *      Only a single target-space block is assumed; the map is applied to {@link #localComputeBlock} and the
 *      reference is updated accordingly
 *
 * @param mapper a map from {@link CoverageModelEMComputeBlock} onto itself
 */
@UpdatesRDD
private void mapWorkers(@Nonnull final Function<CoverageModelEMComputeBlock, CoverageModelEMComputeBlock> mapper) {
    if (sparkContextIsAvailable) {
        computeRDD = computeRDD.mapValues(mapper);
    } else {
        try {
            localComputeBlock = mapper.call(localComputeBlock);
        } catch (final Exception ex) {
            throw new RuntimeException("Can not apply the map function to the local compute block", ex);
        }
    }
}
 
Developer ID: broadinstitute, Project: gatk-protected, Lines: 28, Source: CoverageModelEMWorkspace.java
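A side note on the claim in the Javadoc above that the original partitioning is retained: mapValues only transforms values, so it preserves the partitioner, unlike map or mapToPair. A minimal sketch (illustrative types and values, with a local[*] master assumed for standalone runs):

import java.util.Arrays;

import org.apache.spark.HashPartitioner;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;

import scala.Tuple2;

public class MapValuesPartitioningSketch {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("map-values-partitioning").setMaster("local[*]");
        try (JavaSparkContext ctx = new JavaSparkContext(conf)) {
            // Two key-value blocks, explicitly partitioned by key.
            JavaPairRDD<Integer, String> blocks = ctx
                    .parallelizePairs(Arrays.asList(new Tuple2<>(0, "a"), new Tuple2<>(1, "b")), 2)
                    .partitionBy(new HashPartitioner(2));

            // mapValues transforms only the values, so the HashPartitioner is retained.
            JavaPairRDD<Integer, String> mapped = blocks.mapValues(v -> v + "!");

            System.out.println("partitioner retained: " + mapped.partitioner().isPresent());
        }
    }
}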


Note: The org.apache.spark.api.java.function.Function.call method examples in this article were compiled by 純淨天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors. Please refer to the corresponding project's License before distributing or using the code, and do not reproduce this article without permission.