

Java IntPointer.get Method Code Examples

This article compiles typical usage examples of the IntPointer.get method from the Java class org.bytedeco.javacpp.IntPointer. If you are wondering what IntPointer.get does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore further usage examples of the enclosing class, org.bytedeco.javacpp.IntPointer.


The following presents 11 code examples of the IntPointer.get method, ordered by popularity by default.
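Before the examples, here is a minimal, self-contained sketch (written for this article, not taken from the projects below) showing how IntPointer.get reads values back out of natively allocated memory:

import org.bytedeco.javacpp.IntPointer;

public class IntPointerGetDemo {
    public static void main(String[] args) {
        IntPointer p = new IntPointer(4);   // allocate a native array of 4 ints
        for (int i = 0; i < 4; i++) {
            p.put(i, i * 10);               // write element i
        }
        System.out.println(p.get());        // value at the current position (index 0) -> 0
        System.out.println(p.get(2));       // value at index 2 -> 20

        int[] copy = new int[4];
        p.get(copy);                        // bulk-copy the native contents into a Java array
        p.deallocate();                     // release the native memory explicitly
    }
}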

Example 1: Encoder

import org.bytedeco.javacpp.IntPointer; // import the package/class this method depends on
public Encoder(int sampleRate, int channels, int frameSize, int framesPerPacket,
               int bitrate, int maxBufferSize) throws NativeAudioException {
    mBuffer = new byte[maxBufferSize];
    mAudioBuffer = new short[framesPerPacket * frameSize];
    mFramesPerPacket = framesPerPacket;
    mFrameSize = frameSize;
    mBufferedFrames = 0;
    mEncodedLength = 0;
    mTerminated = false;

    IntPointer error = new IntPointer(1);
    error.put(0);
    mState = opus_encoder_create(sampleRate, channels, OPUS_APPLICATION_VOIP, error);
    if (error.get() < 0)
        throw new NativeAudioException("Opus encoder initialization failed with error: " + getErrorMessage(error.get()));
    opus_encoder_ctl(mState, OPUS_SET_VBR_REQUEST, 0);
    opus_encoder_ctl(mState, OPUS_SET_BITRATE_REQUEST, bitrate);
}
 
Developer: Jack-Q, Project: messenger, Lines of code: 19, Source: OpusCodec.java

Example 2: isamax

import org.bytedeco.javacpp.IntPointer; // import the package/class this method depends on
@Override
protected int isamax(int N, INDArray X, int incX) {
    if (Nd4j.dataType() != DataBuffer.Type.FLOAT)
        logger.warn("FLOAT iamax called");

    Nd4j.getExecutioner().push();

    CudaContext ctx = allocator.getFlowController().prepareAction(null, X);
    int ret2;

    CublasPointer xCPointer = new CublasPointer(X, ctx);

    cublasHandle_t handle = ctx.getHandle();
    synchronized (handle) {
        cublasSetStream_v2(new cublasContext(handle), new CUstream_st(ctx.getOldStream()));

        IntPointer resultPointer = new IntPointer(new int[] {0});
        cublasIsamax_v2(new cublasContext(handle), N, (FloatPointer) xCPointer.getDevicePointer(), incX,
                        resultPointer);
        ret2 = resultPointer.get();
    }
    allocator.registerAction(ctx, null, X);

    return ret2 - 1; // cuBLAS amax results are 1-based; convert to a 0-based index
}
 
Developer: deeplearning4j, Project: nd4j, Lines of code: 26, Source: JcublasLevel1.java

Example 3: idamax

import org.bytedeco.javacpp.IntPointer; // import the package/class this method depends on
@Override
protected int idamax(int N, INDArray X, int incX) {
    if (Nd4j.dataType() != DataBuffer.Type.DOUBLE)
        logger.warn("DOUBLE imax called");

    Nd4j.getExecutioner().push();

    CudaContext ctx = allocator.getFlowController().prepareAction(null, X);
    int ret2;

    CublasPointer xCPointer = new CublasPointer(X, ctx);

    cublasHandle_t handle = ctx.getHandle();
    synchronized (handle) {
        cublasSetStream_v2(new cublasContext(handle), new CUstream_st(ctx.getOldStream()));

        IntPointer resultPointer = new IntPointer(new int[] {0});
        cublasIdamax_v2(new cublasContext(handle), N, (DoublePointer) xCPointer.getDevicePointer(), incX,
                        resultPointer);
        ret2 = resultPointer.get();
    }

    allocator.registerAction(ctx, null, X);

    return ret2 - 1; // cuBLAS amax results are 1-based; convert to a 0-based index
}
 
Developer: deeplearning4j, Project: nd4j, Lines of code: 27, Source: JcublasLevel1.java

Example 4: getSupportedSampleFormats

import org.bytedeco.javacpp.IntPointer; // import the package/class this method depends on
/**
 * Get all sample formats supported by this {@code Codec}. If this {@code Codec}
 * is not an audio codec, then {@code null} is returned.
 *
 * @return all sample formats supported by this {@code Codec}.
 */
public SampleFormat[] getSupportedSampleFormats() {
	IntPointer sampleFormatsPointer = avCodec.sample_fmts();

	if (getType() != MediaType.AUDIO || sampleFormatsPointer == null)
		return null;

	List<SampleFormat> sampleFormats = new ArrayList<SampleFormat>();

	int format;
	int index = 0;
	while ((format = sampleFormatsPointer.get(index++)) != -1)
		sampleFormats.add(SampleFormat.byId(format));

	return sampleFormats.toArray(new SampleFormat[0]);
}
 
Developer: hoary, Project: JavaAV, Lines of code: 22, Source: Codec.java

Example 5: getSupportedSampleRates

import org.bytedeco.javacpp.IntPointer; // import the package/class this method depends on
/**
 * Get all sample rates supported by this {@code Codec}. If this {@code Codec}
 * is not an audio codec, then {@code null} is returned. The sample rates are
 * returned in ascending order.
 *
 * @return all sample rates supported by this {@code Codec}.
 */
public Integer[] getSupportedSampleRates() {
	IntPointer sampleRatesPointer = avCodec.supported_samplerates();

	if (getType() != MediaType.AUDIO || sampleRatesPointer == null)
		return null;

	List<Integer> sampleRates = new ArrayList<Integer>();

	int sampleRate;
	int index = 0;
	while ((sampleRate = sampleRatesPointer.get(index++)) != 0)
		sampleRates.add(sampleRate);

	// ascending order
	Collections.sort(sampleRates);

	return sampleRates.toArray(new Integer[0]);
}
 
Developer: hoary, Project: JavaAV, Lines of code: 26, Source: Codec.java

Example 6: getSupportedPixelFormats

import org.bytedeco.javacpp.IntPointer; // import the package/class this method depends on
/**
 * Get all pixel formats supported by this {@code Codec}. If this {@code Codec}
 * is not a video codec, then {@code null} is returned. The pixel formats are
 * returned in ascending order.
 *
 * @return all pixel formats supported by this {@code Codec}.
 */
public PixelFormat[] getSupportedPixelFormats() {
	IntPointer formatsPointer = avCodec.pix_fmts();

	if (getType() != MediaType.VIDEO || formatsPointer == null)
		return null;

	List<PixelFormat> pixelFormats = new ArrayList<PixelFormat>();

	int format;
	int index = 0;
	while ((format = formatsPointer.get(index++)) != -1)
		pixelFormats.add(PixelFormat.byId(format));

	// ascending order
	Collections.sort(pixelFormats);

	return pixelFormats.toArray(new PixelFormat[0]);
}
 
Developer: hoary, Project: JavaAV, Lines of code: 26, Source: Codec.java
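Examples 4-6 share one idiom: FFmpeg exposes each codec's supported formats and sample rates as a native int array terminated by a sentinel value (-1 for sample/pixel formats, 0 for sample rates), and IntPointer.get(index) walks the array until that sentinel is hit. Here is a standalone sketch of the idiom (the helper name and the simulated data are illustrative, not part of JavaAV):

import org.bytedeco.javacpp.IntPointer;
import java.util.ArrayList;
import java.util.List;

public class SentinelArrayDemo {

    // Reads ints from a native array until the sentinel value is reached.
    static List<Integer> readUntil(IntPointer ptr, int sentinel) {
        List<Integer> values = new ArrayList<Integer>();
        if (ptr == null || ptr.isNull()) {
            return values;                       // the codec exposes no such list
        }
        int value;
        int index = 0;
        while ((value = ptr.get(index++)) != sentinel) {
            values.add(value);
        }
        return values;
    }

    public static void main(String[] args) {
        // Simulate a native, -1 terminated list such as AVCodec.pix_fmts.
        IntPointer fake = new IntPointer(0, 2, 3, -1);
        System.out.println(readUntil(fake, -1)); // prints [0, 2, 3]
        fake.deallocate();
    }
}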

Example 7: Decoder

import org.bytedeco.javacpp.IntPointer; // import the package/class this method depends on
public Decoder(int sampleRate, int channels) throws NativeAudioException {
    IntPointer error = new IntPointer(1);
    error.put(0);
    mState = opus_decoder_create(sampleRate, channels, error);
    if (error.get() < 0)
        throw new NativeAudioException("Opus decoder initialization failed with error: " + getErrorMessage(error.get()));
}
 
Developer: Jack-Q, Project: messenger, Lines of code: 8, Source: OpusCodec.java

Example 8: getBitrate

import org.bytedeco.javacpp.IntPointer; // import the package/class this method depends on
public int getBitrate() {
    IntPointer ptr = new IntPointer(1);
    opus_encoder_ctl(mState, OPUS_GET_BITRATE_REQUEST, ptr);
    return ptr.get();
}
 
Developer: Jack-Q, Project: messenger, Lines of code: 6, Source: OpusCodec.java
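The same out-parameter pattern works for any opus_encoder_ctl getter: pass a one-element IntPointer, let the native call fill it, and read the result back with get(). A hedged sketch of another getter in the same style (it assumes the same OpusCodec wrapper and opus bindings as the examples above, and that OPUS_GET_VBR_REQUEST is exposed by those bindings):

public boolean isVbrEnabled() {
    IntPointer ptr = new IntPointer(1);              // out-parameter for the ctl request
    opus_encoder_ctl(mState, OPUS_GET_VBR_REQUEST, ptr);
    return ptr.get() != 0;                           // Opus reports VBR as 0/1
}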

Example 9: detect

import org.bytedeco.javacpp.IntPointer; // import the package/class this method depends on
public static DetectedMarker[] detect(ARToolKitPlus.TrackerMultiMarker tracker, opencv_core.IplImage image) {

        int cameraWidth = image.width();
        int cameraHeight = image.height();
        // TODO: check imgWidth and init width.
        
        CvPoint2D32f corners = new CvPoint2D32f(4);
        CvMemStorage memory = CvMemStorage.create();
//        CvMat points = CvMat.create(1, 4, CV_32F, 2);
        Mat points = new Mat(1, 4, CV_32F, 2);
        
        CvSize subPixelSize = null, subPixelZeroZone = null;
        CvTermCriteria subPixelTermCriteria = null;
        int subPixelWindow = 11;

        subPixelSize = cvSize(subPixelWindow / 2, subPixelWindow / 2);
        subPixelZeroZone = cvSize(-1, -1);
        subPixelTermCriteria = cvTermCriteria(CV_TERMCRIT_EPS, 100, 0.001);

        int n = 0;
        IntPointer markerNum = new IntPointer(1); // out-parameter: arDetectMarkerLite writes the detected marker count here
        ARToolKitPlus.ARMarkerInfo markers = new ARToolKitPlus.ARMarkerInfo(null);
        tracker.arDetectMarkerLite(image.imageData(), tracker.getThreshold() /* 100 */, markers, markerNum);

        DetectedMarker[] markers2 = new DetectedMarker[markerNum.get(0)];

        for (int i = 0; i < markers2.length && !markers.isNull(); i++) {

            markers.position(i);
            int id = markers.id();
            if (id < 0) {
                // no detected ID...
                continue;
            }
            int dir = markers.dir();
            float confidence = markers.cf();
            float[] vertex = new float[8];
            markers.vertex().get(vertex);

            int w = subPixelWindow / 2 + 1;
            if (vertex[0] - w < 0 || vertex[0] + w >= cameraWidth || vertex[1] - w < 0 || vertex[1] + w >= cameraHeight
                    || vertex[2] - w < 0 || vertex[2] + w >= cameraWidth || vertex[3] - w < 0 || vertex[3] + w >= cameraHeight
                    || vertex[4] - w < 0 || vertex[4] + w >= cameraWidth || vertex[5] - w < 0 || vertex[5] + w >= cameraHeight
                    || vertex[6] - w < 0 || vertex[6] + w >= cameraWidth || vertex[7] - w < 0 || vertex[7] + w >= cameraHeight) {
                // too tight for cvFindCornerSubPix...

                continue;
            }

            // TODO: major bug here -> free error...
//            opencv_core.CvMat points = opencv_core.CvMat.create(1, 4, CV_32F, 2);
//            points.getFloatBuffer().put(vertex);
//            opencv_core.CvBox2D box = cvMinAreaRect2(points, memory);
//
//            float bw = box.size().width();
//            float bh = box.size().height();
//            cvClearMemStorage(memory);
//            if (bw <= 0 || bh <= 0 || bw / bh < 0.1 || bw / bh > 10) {
//                // marker is too "flat" to have been IDed correctly...
//                continue;
//            }

            for (int j = 0; j < 4; j++) {
                corners.position(j).put(vertex[2 * j], vertex[2 * j + 1]);
            }

            cvFindCornerSubPix(image, corners.position(0), 4, subPixelSize, subPixelZeroZone, subPixelTermCriteria);
            double[] d = {corners.position((4 - dir) % 4).x(), corners.position((4 - dir) % 4).y(),
                corners.position((5 - dir) % 4).x(), corners.position((5 - dir) % 4).y(),
                corners.position((6 - dir) % 4).x(), corners.position((6 - dir) % 4).y(),
                corners.position((7 - dir) % 4).x(), corners.position((7 - dir) % 4).y()};

            markers2[n++] = new DetectedMarker(id, d, confidence);
        }
        return Arrays.copyOf(markers2, n);
    }
 
Developer: poqudrof, Project: PapARt, Lines of code: 77, Source: DetectedMarker.java

Example 10: main

import org.bytedeco.javacpp.IntPointer; // import the package/class this method depends on
public static void main(String[] args) {
    // Load two images and allocate other structures
    IplImage imgA = cvLoadImage(
            "image0.png",
            CV_LOAD_IMAGE_GRAYSCALE);
    IplImage imgB = cvLoadImage(
            "image1.png",
            CV_LOAD_IMAGE_GRAYSCALE);

    CvSize img_sz = cvGetSize(imgA);
    int win_size = 15;

    // IplImage imgC = cvLoadImage("OpticalFlow1.png",
    // CV_LOAD_IMAGE_UNCHANGED);
    IplImage imgC = cvLoadImage(
            "image0.png",
            CV_LOAD_IMAGE_UNCHANGED);
    // Get the features for tracking
    IplImage eig_image = cvCreateImage(img_sz, IPL_DEPTH_32F, 1);
    IplImage tmp_image = cvCreateImage(img_sz, IPL_DEPTH_32F, 1);

    IntPointer corner_count = new IntPointer(1).put(MAX_CORNERS); // in: capacity of cornersA; out: number of corners actually found
    CvPoint2D32f cornersA = new CvPoint2D32f(MAX_CORNERS);

    CvArr mask = null;
    cvGoodFeaturesToTrack(imgA, eig_image, tmp_image, cornersA,
            corner_count, 0.05, 5.0, mask, 3, 0, 0.04);

    cvFindCornerSubPix(imgA, cornersA, corner_count.get(),
            cvSize(win_size, win_size), cvSize(-1, -1),
            cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.03));

    // Call Lucas Kanade algorithm
    BytePointer features_found = new BytePointer(MAX_CORNERS);
    FloatPointer feature_errors = new FloatPointer(MAX_CORNERS);

    CvSize pyr_sz = cvSize(imgA.width() + 8, imgB.height() / 3);

    IplImage pyrA = cvCreateImage(pyr_sz, IPL_DEPTH_32F, 1);
    IplImage pyrB = cvCreateImage(pyr_sz, IPL_DEPTH_32F, 1);

    CvPoint2D32f cornersB = new CvPoint2D32f(MAX_CORNERS);
    cvCalcOpticalFlowPyrLK(imgA, imgB, pyrA, pyrB, cornersA, cornersB,
            corner_count.get(), cvSize(win_size, win_size), 5,
            features_found, feature_errors,
            cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.3), 0);

    // Make an image of the results
    for (int i = 0; i < corner_count.get(); i++) {
        if (features_found.get(i) == 0 || feature_errors.get(i) > 550) {
            System.out.println("Error is " + feature_errors.get(i) + "\n");
            continue;
        }
        System.out.println("Got it\n");
        cornersA.position(i);
        cornersB.position(i);
        CvPoint p0 = cvPoint(Math.round(cornersA.x()),
                Math.round(cornersA.y()));
        CvPoint p1 = cvPoint(Math.round(cornersB.x()),
                Math.round(cornersB.y()));
        cvLine(imgC, p0, p1, CV_RGB(255, 0, 0), 
                2, 8, 0);
    }

    cvSaveImage(
            "image0-1.png",
            imgC);
    cvNamedWindow( "LKpyr_OpticalFlow", 0 );
    cvShowImage( "LKpyr_OpticalFlow", imgC );
    cvWaitKey(0);
}
 
Developer: duodecimo, Project: jmcv, Lines of code: 72, Source: OpticalFlowTracker.java

Example 11: recognize

import org.bytedeco.javacpp.IntPointer; // import the package/class this method depends on
public int recognize(IplImage faceData) {

		Mat faces = cvarrToMat(faceData);

		cvtColor(faces, faces, CV_BGR2GRAY);

		IntPointer label = new IntPointer(1);            // out-parameter: predicted label
		DoublePointer confidence = new DoublePointer(1); // out-parameter: prediction confidence (one element, not zero)
		this.faceRecognizer.predict(faces, label, confidence);

		int predictedLabel = label.get(0);

		return predictedLabel;

	}
 
Developer: MeAnupSarkar, Project: ExoVisix, Lines of code: 16, Source: FaceRecognizer.java


Note: The org.bytedeco.javacpp.IntPointer.get examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub/MSDocs. The snippets were selected from open-source projects contributed by their respective authors, and the source code copyright remains with those authors. For distribution and use, please follow the corresponding project's license; do not reproduce without permission.