

Java ArArchiveInputStream Class Code Examples

This article collects typical usage examples of the Java class org.apache.commons.compress.archivers.ar.ArArchiveInputStream. If you are wondering what ArArchiveInputStream is for, how to use it, or what real-world examples look like, the curated snippets below should help.


The ArArchiveInputStream class belongs to the org.apache.commons.compress.archivers.ar package. Nine code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps surface better Java code samples.
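Before diving into the collected examples, here is a minimal sketch of the pattern they all share: wrap an InputStream in an ArArchiveInputStream, iterate the entries with getNextArEntry(), and read each entry's bytes from the archive stream itself. The class name ArReadSketch and the path "example.ar" are placeholders for illustration, not taken from the projects below.

import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.IOException;

import org.apache.commons.compress.archivers.ar.ArArchiveEntry;
import org.apache.commons.compress.archivers.ar.ArArchiveInputStream;

public class ArReadSketch {
    public static void main(String[] args) throws IOException {
        // "example.ar" is a placeholder path used only for this sketch.
        try (ArArchiveInputStream ar = new ArArchiveInputStream(
                new BufferedInputStream(new FileInputStream("example.ar")))) {
            ArArchiveEntry entry;
            while ((entry = ar.getNextArEntry()) != null) {
                // Each entry's content is read from the archive stream itself.
                ByteArrayOutputStream content = new ByteArrayOutputStream();
                byte[] buffer = new byte[4096];
                int n;
                while ((n = ar.read(buffer)) != -1) {
                    content.write(buffer, 0, n);
                }
                System.out.println(entry.getName() + " (" + content.size() + " bytes)");
            }
        }
    }
}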

Example 1: getMediaType

import org.apache.commons.compress.archivers.ar.ArArchiveInputStream; // import the required package/class
static MediaType getMediaType(ArchiveInputStream stream) {
    if (stream instanceof JarArchiveInputStream) {
        return JAR;
    } else if (stream instanceof ZipArchiveInputStream) {
        return ZIP;
    } else if (stream instanceof ArArchiveInputStream) {
        return AR;
    } else if (stream instanceof CpioArchiveInputStream) {
        return CPIO;
    } else if (stream instanceof DumpArchiveInputStream) {
        return DUMP;
    } else if (stream instanceof TarArchiveInputStream) {
        return TAR;
    } else {
        return MediaType.OCTET_STREAM;
    }
}
 
Developer: kolbasa, Project: OCRaptor, Lines: 18, Source: PackageParser.java

Example 2: parseControlFile

import org.apache.commons.compress.archivers.ar.ArArchiveInputStream; // import the required package/class
public static Map<String, String> parseControlFile ( final File packageFile ) throws IOException, ParserException
{
    try ( final ArArchiveInputStream in = new ArArchiveInputStream ( new FileInputStream ( packageFile ) ) )
    {
        ArchiveEntry ar;
        while ( ( ar = in.getNextEntry () ) != null )
        {
            if ( !ar.getName ().equals ( "control.tar.gz" ) )
            {
                continue;
            }
            try ( final TarArchiveInputStream inputStream = new TarArchiveInputStream ( new GZIPInputStream ( in ) ) )
            {
                TarArchiveEntry te;
                while ( ( te = inputStream.getNextTarEntry () ) != null )
                {
                    String name = te.getName ();
                    if ( name.startsWith ( "./" ) )
                    {
                        name = name.substring ( 2 );
                    }
                    if ( !name.equals ( "control" ) )
                    {
                        continue;
                    }
                    return parseControlFile ( inputStream );
                }
            }
        }
    }
    return null;
}
 
Developer: eclipse, Project: packagedrone, Lines: 33, Source: Packages.java

Example 3: read

import org.apache.commons.compress.archivers.ar.ArArchiveInputStream; // import the required package/class
/**
 * Read from compressed file
 * 
 * @param srcPath
 *            path of compressed file
 * @param fileCompressor
 *            FileCompressor object
 * @throws Exception
 */
@Override
public void read(String srcPath, FileCompressor fileCompressor)
        throws Exception {
    long t1 = System.currentTimeMillis();
    byte[] data = FileUtil.convertFileToByte(srcPath);
    ByteArrayInputStream bais = new ByteArrayInputStream(data);
    ArArchiveInputStream ais = new ArArchiveInputStream(bais);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try {
        byte[] buffer = new byte[1024];
        int readByte;
        ArArchiveEntry entry = ais.getNextArEntry();
        while (entry != null && entry.getSize() > 0) {
            long t2 = System.currentTimeMillis();
            baos = new ByteArrayOutputStream();
            readByte = ais.read(buffer);
            while (readByte != -1) {
                baos.write(buffer, 0, readByte);
                readByte = ais.read(buffer);
            }
            BinaryFile binaryFile = new BinaryFile(entry.getName(),
                    baos.toByteArray());
            fileCompressor.addBinaryFile(binaryFile);
            LogUtil.createAddFileLog(fileCompressor, binaryFile, t2,
                    System.currentTimeMillis());
            entry = ais.getNextArEntry();
        }
    } catch (Exception e) {
        FileCompressor.LOGGER.error("Error on get compressor file", e);
    } finally {
        baos.close();
        ais.close();
        bais.close();
    }
    LogUtil.createReadLog(fileCompressor, srcPath, data.length, t1,
            System.currentTimeMillis());
}
 
Developer: espringtran, Project: compressor4j, Lines: 47, Source: ArProcessor.java

Example 4: thinArchivesDoNotContainAbsolutePaths

import org.apache.commons.compress.archivers.ar.ArArchiveInputStream; // import the required package/class
@Test
public void thinArchivesDoNotContainAbsolutePaths() throws IOException {
  CxxPlatform cxxPlatform =
      CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));
  BuildRuleResolver ruleResolver =
      new SingleThreadedBuildRuleResolver(
          TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
  assumeTrue(cxxPlatform.getAr().resolve(ruleResolver).supportsThinArchives());
  ProjectWorkspace workspace =
      TestDataHelper.createProjectWorkspaceForScenario(this, "cxx_library", tmp);
  workspace.setUp();
  Path archive =
      workspace.buildAndReturnOutput("-c", "cxx.archive_contents=thin", "//:foo#default,static");

  // NOTE: Replace the thin header with a normal header just so the commons compress parser
  // can parse the archive contents.
  try (OutputStream outputStream =
      Files.newOutputStream(workspace.getPath(archive), StandardOpenOption.WRITE)) {
    outputStream.write(ObjectFileScrubbers.GLOBAL_HEADER);
  }

  // Now iterate the archive and verify it contains no absolute paths.
  try (ArArchiveInputStream stream =
      new ArArchiveInputStream(new FileInputStream(workspace.getPath(archive).toFile()))) {
    ArArchiveEntry entry;
    while ((entry = stream.getNextArEntry()) != null) {
      if (!entry.getName().isEmpty()) {
        assertFalse(
            "found absolute path: " + entry.getName(),
            workspace.getDestPath().getFileSystem().getPath(entry.getName()).isAbsolute());
      }
    }
  }
}
 
Developer: facebook, Project: buck, Lines: 35, Source: CxxLibraryIntegrationTest.java

Example 5: extractAr

import org.apache.commons.compress.archivers.ar.ArArchiveInputStream; // import the required package/class
public static void extractAr(File file, File destination) throws IOException {
    try (final ArArchiveInputStream in = new ArArchiveInputStream(new FileInputStream(file))) {
        extractArchive(in, destination);
    }
}
 
Developer: reines, Project: dropwizard-debpkg-maven-plugin, Lines: 6, Source: ArchiveUtils.java

Example 6: thatGeneratedArchivesAreDeterministic

import org.apache.commons.compress.archivers.ar.ArArchiveInputStream; // import the required package/class
@Test
@SuppressWarnings("PMD.AvoidUsingOctalValues")
public void thatGeneratedArchivesAreDeterministic() throws IOException, InterruptedException {
  assumeTrue(Platform.detect() == Platform.MACOS || Platform.detect() == Platform.LINUX);
  ProjectFilesystem filesystem = TestProjectFilesystems.createProjectFilesystem(tmp.getRoot());
  CxxPlatform platform =
      CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));

  // Build up the paths to various files the archive step will use.
  BuildRuleResolver ruleResolver =
      new SingleThreadedBuildRuleResolver(
          TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
  SourcePathResolver sourcePathResolver =
      DefaultSourcePathResolver.from(new SourcePathRuleFinder(ruleResolver));
  Archiver archiver = platform.getAr().resolve(ruleResolver);
  Path output = filesystem.getPath("output.a");
  Path input = filesystem.getPath("input.dat");
  filesystem.writeContentsToPath("blah", input);
  Preconditions.checkState(filesystem.resolve(input).toFile().setExecutable(true));

  // Build an archive step.
  ArchiveStep archiveStep =
      new ArchiveStep(
          filesystem,
          archiver.getEnvironment(sourcePathResolver),
          archiver.getCommandPrefix(sourcePathResolver),
          ImmutableList.of(),
          getArchiveOptions(false),
          output,
          ImmutableList.of(input),
          archiver,
          filesystem.getPath("scratchDir"));
  FileScrubberStep fileScrubberStep =
      new FileScrubberStep(filesystem, output, archiver.getScrubbers());

  // Execute the archive step and verify it ran successfully.
  ExecutionContext executionContext = TestExecutionContext.newInstanceWithRealProcessExecutor();
  TestConsole console = (TestConsole) executionContext.getConsole();
  int exitCode = archiveStep.execute(executionContext).getExitCode();
  assertEquals("archive step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);
  exitCode = fileScrubberStep.execute(executionContext).getExitCode();
  assertEquals("archive scrub step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);

  // Now read the archive entries and verify that the timestamp, UID, and GID fields are
  // zero'd out.
  try (ArArchiveInputStream stream =
      new ArArchiveInputStream(new FileInputStream(filesystem.resolve(output).toFile()))) {
    ArArchiveEntry entry = stream.getNextArEntry();
    assertEquals(
        ObjectFileCommonModificationDate.COMMON_MODIFICATION_TIME_STAMP, entry.getLastModified());
    assertEquals(0, entry.getUserId());
    assertEquals(0, entry.getGroupId());
    assertEquals(String.format("0%o", entry.getMode()), 0100644, entry.getMode());
  }
}
 
Developer: facebook, Project: buck, Lines: 56, Source: ArchiveStepIntegrationTest.java

Example 7: emptyArchives

import org.apache.commons.compress.archivers.ar.ArArchiveInputStream; // import the required package/class
@Test
public void emptyArchives() throws IOException, InterruptedException {
  ProjectFilesystem filesystem = TestProjectFilesystems.createProjectFilesystem(tmp.getRoot());
  CxxPlatform platform =
      CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));

  // Build up the paths to various files the archive step will use.
  BuildRuleResolver ruleResolver =
      new SingleThreadedBuildRuleResolver(
          TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
  SourcePathResolver sourcePathResolver =
      DefaultSourcePathResolver.from(new SourcePathRuleFinder(ruleResolver));
  Archiver archiver = platform.getAr().resolve(ruleResolver);
  Path output = filesystem.getPath("output.a");

  // Build an archive step.
  ArchiveStep archiveStep =
      new ArchiveStep(
          filesystem,
          archiver.getEnvironment(sourcePathResolver),
          archiver.getCommandPrefix(sourcePathResolver),
          ImmutableList.of(),
          getArchiveOptions(false),
          output,
          ImmutableList.of(),
          archiver,
          filesystem.getPath("scratchDir"));

  // Execute the archive step and verify it ran successfully.
  ExecutionContext executionContext = TestExecutionContext.newInstanceWithRealProcessExecutor();
  TestConsole console = (TestConsole) executionContext.getConsole();
  int exitCode = archiveStep.execute(executionContext).getExitCode();
  assertEquals("archive step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);

  // Now read the archive entries and verify that the timestamp, UID, and GID fields are
  // zero'd out.
  try (ArArchiveInputStream stream =
      new ArArchiveInputStream(new FileInputStream(filesystem.resolve(output).toFile()))) {
    assertThat(stream.getNextArEntry(), Matchers.nullValue());
  }
}
 
Developer: facebook, Project: buck, Lines: 42, Source: ArchiveStepIntegrationTest.java

Example 8: inputDirs

import org.apache.commons.compress.archivers.ar.ArArchiveInputStream; // import the required package/class
@Test
public void inputDirs() throws IOException, InterruptedException {
  assumeTrue(Platform.detect() == Platform.MACOS || Platform.detect() == Platform.LINUX);
  ProjectFilesystem filesystem = TestProjectFilesystems.createProjectFilesystem(tmp.getRoot());
  CxxPlatform platform =
      CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));

  // Build up the paths to various files the archive step will use.
  BuildRuleResolver ruleResolver =
      new SingleThreadedBuildRuleResolver(
          TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
  SourcePathResolver sourcePathResolver =
      DefaultSourcePathResolver.from(new SourcePathRuleFinder(ruleResolver));
  Archiver archiver = platform.getAr().resolve(ruleResolver);
  Path output = filesystem.getPath("output.a");
  Path input = filesystem.getPath("foo/blah.dat");
  filesystem.mkdirs(input.getParent());
  filesystem.writeContentsToPath("blah", input);

  // Build an archive step.
  ArchiveStep archiveStep =
      new ArchiveStep(
          filesystem,
          archiver.getEnvironment(sourcePathResolver),
          archiver.getCommandPrefix(sourcePathResolver),
          ImmutableList.of(),
          getArchiveOptions(false),
          output,
          ImmutableList.of(input.getParent()),
          archiver,
          filesystem.getPath("scratchDir"));

  // Execute the archive step and verify it ran successfully.
  ExecutionContext executionContext = TestExecutionContext.newInstanceWithRealProcessExecutor();
  TestConsole console = (TestConsole) executionContext.getConsole();
  int exitCode = archiveStep.execute(executionContext).getExitCode();
  assertEquals("archive step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);

  // Now read the archive entries and verify that the timestamp, UID, and GID fields are
  // zero'd out.
  try (ArArchiveInputStream stream =
      new ArArchiveInputStream(new FileInputStream(filesystem.resolve(output).toFile()))) {
    ArArchiveEntry entry = stream.getNextArEntry();
    assertThat(entry.getName(), Matchers.equalTo("blah.dat"));
  }
}
 
Developer: facebook, Project: buck, Lines: 47, Source: ArchiveStepIntegrationTest.java

Example 9: thinArchives

import org.apache.commons.compress.archivers.ar.ArArchiveInputStream; // import the required package/class
@Test
public void thinArchives() throws IOException, InterruptedException {
  assumeTrue(Platform.detect() == Platform.MACOS || Platform.detect() == Platform.LINUX);
  ProjectFilesystem filesystem = TestProjectFilesystems.createProjectFilesystem(tmp.getRoot());
  CxxPlatform platform =
      CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));

  // Build up the paths to various files the archive step will use.
  BuildRuleResolver ruleResolver =
      new SingleThreadedBuildRuleResolver(
          TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
  SourcePathResolver sourcePathResolver =
      DefaultSourcePathResolver.from(new SourcePathRuleFinder(ruleResolver));
  Archiver archiver = platform.getAr().resolve(ruleResolver);

  assumeTrue(archiver.supportsThinArchives());

  Path output = filesystem.getPath("foo/libthin.a");
  filesystem.mkdirs(output.getParent());

  // Create a really large input file so it's obvious that the archive is thin.
  Path input = filesystem.getPath("bar/blah.dat");
  filesystem.mkdirs(input.getParent());
  byte[] largeInputFile = new byte[1024 * 1024];
  byte[] fillerToRepeat = "hello\n".getBytes(StandardCharsets.UTF_8);
  for (int i = 0; i < largeInputFile.length; i++) {
    largeInputFile[i] = fillerToRepeat[i % fillerToRepeat.length];
  }
  filesystem.writeBytesToPath(largeInputFile, input);

  // Build an archive step.
  ArchiveStep archiveStep =
      new ArchiveStep(
          filesystem,
          archiver.getEnvironment(sourcePathResolver),
          archiver.getCommandPrefix(sourcePathResolver),
          ImmutableList.of(),
          getArchiveOptions(true),
          output,
          ImmutableList.of(input),
          archiver,
          filesystem.getPath("scratchDir"));

  // Execute the archive step and verify it ran successfully.
  ExecutionContext executionContext = TestExecutionContext.newInstanceWithRealProcessExecutor();
  TestConsole console = (TestConsole) executionContext.getConsole();
  int exitCode = archiveStep.execute(executionContext).getExitCode();
  assertEquals("archive step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);

  // Verify that the thin header is present.
  assertThat(filesystem.readFirstLine(output), Matchers.equalTo(Optional.of("!<thin>")));

  // Verify that even though the archived contents is really big, the archive is still small.
  assertThat(filesystem.getFileSize(output), Matchers.lessThan(1000L));

  // NOTE: Replace the thin header with a normal header just so the commons compress parser
  // can parse the archive contents.
  try (OutputStream outputStream =
      Files.newOutputStream(filesystem.resolve(output), StandardOpenOption.WRITE)) {
    outputStream.write(ObjectFileScrubbers.GLOBAL_HEADER);
  }

  // Now read the archive entries and verify that the timestamp, UID, and GID fields are
  // zero'd out.
  try (ArArchiveInputStream stream =
      new ArArchiveInputStream(new FileInputStream(filesystem.resolve(output).toFile()))) {
    ArArchiveEntry entry = stream.getNextArEntry();

    // Verify that the input names are relative paths from the outputs parent dir.
    assertThat(
        entry.getName(), Matchers.equalTo(output.getParent().relativize(input).toString()));
  }
}
 
Developer: facebook, Project: buck, Lines: 74, Source: ArchiveStepIntegrationTest.java


Note: The org.apache.commons.compress.archivers.ar.ArArchiveInputStream examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub/MSDocs. The snippets were selected from open-source projects contributed by their respective developers; copyright remains with the original authors, and distribution or use should follow each project's license. Please do not reproduce without permission.