当前位置: 首页>>代码示例>>C++>>正文


C++ PClip类代码示例

本文整理汇总了C++中PClip的典型用法代码示例。如果您正苦于以下问题:C++ PClip类的具体用法?C++ PClip怎么用?C++ PClip使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。


在下文中一共展示了PClip类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。

示例1: if

/// @brief Produce the shape-mask frame for frame index n.
///
/// Converts the source clip to gray scale, runs the shape detection over the
/// gray pixels, then converts the 1-channel result back into the source
/// clip's colorspace.
///
/// @param n   frame number to render
/// @param env AviSynth script environment (frame allocator / error sink)
/// @return    newly allocated frame holding the mask in the source colorspace
PVideoFrame __stdcall ShapeMask::GetFrame(int n, IScriptEnvironment* env) {
	int colorspace;

	// Map the clip's colorspace onto the plugin's own colorspace constants.
	// NOTE(review): `colorspace` stays uninitialized on the final branch —
	// this assumes raiseError() never returns (throws). Confirm.
	if (vi.IsRGB24())      colorspace = RGB24;
	else if (vi.IsRGB32()) colorspace = RGB32;
	else if (vi.IsYUY2())  colorspace = YUV2;
	else if (vi.IsYV12())  colorspace = YV12;
	else raiseError(env, "Unsupported color space, must be one of RGB24, RGB32, YUV2 or YV12");

	// Work on a gray-scale version of the input; output keeps the input format.
	PClip srcClip = toGrayScale(env, child);
	PVideoFrame src = srcClip->GetFrame(n, env);
	PVideoFrame dst = env->NewVideoFrame(vi);

	const uchar* srcp = src->GetReadPtr();
	const int src_pitch = src->GetPitch();
	const int bpp = vi.BitsPerPixel();

	uchar* retp;

	// No change to the source pixels in the process steps, so ok to cast to non-const
	// returns a 1 channel gray scale image which needs to be converted to whatever format the source clip is in.
	retp = process_frame((uchar*)srcp, vi.width, vi.height, src_pitch, colorspace, threshold, minarea, rectonly);

	// Expand the single-channel mask back into the destination frame layout.
	if (vi.IsPlanar()) copyPlanar(retp, dst, bpp);
	else if (vi.IsYUY2()) copyYUY2(retp, dst);
	else copyRGB(retp, dst, bpp);

	// NOTE(review): if process_frame() allocates with new[], this should be
	// delete[] — scalar delete on an array is UB. Confirm against its source.
	delete retp;
	return dst;
}
开发者ID:jojje,项目名称:shapemask,代码行数:30,代码来源:shapemask.cpp

示例2: Create_SoundTouch

/// @brief AviSynth factory for the TimeStretch filter.
///
/// args layout: [0] input clip, [1] tempo %, [2] rate %, [3] pitch %,
/// [4] trailing named SoundTouch options forwarded verbatim.
/// Stereo input gets the phase-aware stereo implementation; anything else
/// the generic per-channel one.
///
/// @throws via env->ThrowError when the clip has no audio or is not float.
AVSValue __cdecl Create_SoundTouch(AVSValue args, void*, IScriptEnvironment* env) {

  try {	// HIDE DAMN SEH COMPILER BUG!!!

    // Fetch the clip once instead of re-evaluating args[0].AsClip() per use.
    PClip clip = args[0].AsClip();
    const VideoInfo& vi = clip->GetVideoInfo();

    if (!vi.HasAudio())
      env->ThrowError("Input clip does not have audio.");

    if (!(vi.SampleType() & SAMPLE_FLOAT))
      env->ThrowError("Input audio sample format to TimeStretch must be float.");

    // Percentages; 100.0 means "unchanged".
    const float tempo = (float)args[1].AsFloat(100.0);
    const float rate  = (float)args[2].AsFloat(100.0);
    const float pitch = (float)args[3].AsFloat(100.0);

    if (vi.AudioChannels() == 2) {
      return new AVSStereoSoundTouch(clip, tempo, rate, pitch, &args[4], env);
    }
    return new AVSsoundtouch(clip, tempo, rate, pitch, &args[4], env);

  }
  catch (...) { throw; }
}
开发者ID:1974kpkpkp,项目名称:AviSynthPlus,代码行数:30,代码来源:TimeStretch.cpp

示例3: main

int main() {
    try {
        cout << "Creating script environment 1..." << endl;
        IScriptEnvironment* env1 = CreateScriptEnvironment(3);

        cout << "Creating script environment 2..." << endl;
        IScriptEnvironment* env2 = CreateScriptEnvironment(3);

        cout << "Deleting script environment 1..." << endl;
        delete env1;

        cout << "Invoking BlankClip on env 2..." << endl;
        AVSValue ret = env2->Invoke("BlankClip", AVSValue(), 0);
        PClip clp = ret.AsClip();

        cout << "Reading frame 0 from env2..." << endl;
        PVideoFrame frm = clp->GetFrame(0, env2);
    } catch (AvisynthError &e) {
        cerr << "AvisynthError: " << e.msg << endl;
        return -1;
    } catch (...) {
        cerr << "unknown error" << endl;
        return -1;
    }

    return 0;
}
开发者ID:GDXN,项目名称:avxsynth,代码行数:27,代码来源:multiple_script_env.cpp

示例4: CreateAreaResize

/// AviSynth factory for AreaResize: validates the requested target size
/// against the input colorspace's subsampling constraints, then constructs
/// the filter. Down-scaling only.
AVSValue __cdecl CreateAreaResize(AVSValue args, void* user_data, IScriptEnvironment* env)
{
    PClip clip = args[0].AsClip();
    const int dstWidth = args[1].AsInt();
    const int dstHeight = args[2].AsInt();

    if (dstWidth < 1 || dstHeight < 1) {
        env->ThrowError("AreaResize: target width/height must be 1 or higher.");
    }

    const VideoInfo& vi = clip->GetVideoInfo();

    // Packed YUY2 is not handled by this filter at all.
    if (vi.IsYUY2()) {
        env->ThrowError("AreaResize: Unsupported colorspace(YUY2).");
    }
    // Chroma subsampling dictates the modulus the target size must satisfy:
    // YV411 packs 4 luma samples per chroma sample horizontally,
    // YV16/YV12 pack 2, and YV12 additionally halves chroma vertically.
    if (vi.IsYV411() && dstWidth % 4 != 0) {
        env->ThrowError("AreaResize: Target width requires mod 4.");
    }
    if ((vi.IsYV16() || vi.IsYV12()) && dstWidth % 2 != 0) {
        env->ThrowError("AreaResize: Target width requires mod 2.");
    }
    if (vi.IsYV12() && dstHeight % 2 != 0) {
        env->ThrowError("AreaResize: Target height requires mod 2.");
    }
    // Area averaging only makes sense when shrinking.
    if (vi.width < dstWidth || vi.height < dstHeight) {
        env->ThrowError("AreaResize: This filter is only for down scale.");
    }

    return new AreaResize(clip, dstWidth, dstHeight, env);
}
开发者ID:chikuzen,项目名称:AreaResize,代码行数:29,代码来源:AreaResize.cpp

示例5: GenericVideoFilter

/// @brief Constructs the FTurn rotation filter.
///
/// Validates colorspace and CPU support, swaps the output dimensions for
/// quarter turns, and selects the fastest available turn routine (SSSE3
/// when the CPU reports it, otherwise SSE2).
///
/// @param child     source clip
/// @param direction LEFT / RIGHT quarter turn or 180-degree turn
/// @param chroma    whether chroma planes are processed
/// @param mt        whether to run multithreaded
/// @throws via env->ThrowError on unsupported colorspace or missing SSE2
FTurn::FTurn(PClip child, TurnDirection direction, bool chroma, bool mt, IScriptEnvironment* env) 
    : GenericVideoFilter(child), chroma_(chroma), mt_(mt) {
    if (!isSupportedColorspace(vi.pixel_type)) {
        env->ThrowError(getUnsupportedColorspaceMessage());
    }

    if (!(env->GetCPUFlags() & CPUF_SSE2)) {
        env->ThrowError("Sorry, at least SSE2 is required");
    }
    
    int CPUInfo[4]; //eax, ebx, ecx, edx
    __cpuid(CPUInfo, 1);

    // ECX bit 9 of CPUID leaf 1 indicates SSSE3. The explicit != 0 avoids
    // MSVC warning C4800 (int -> bool), replacing the previous pair of
    // unbalanced #pragma warning(disable: 4800) directives that never
    // restored the warning state.
    const bool ssse3 = (CPUInfo[2] & 0x00000200) != 0;

    if (direction == TurnDirection::RIGHT || direction == TurnDirection::LEFT) {
        // Quarter turns transpose the frame, so swap output dimensions.
        const VideoInfo& srcVi = child->GetVideoInfo();
        vi.width = srcVi.height;
        vi.height = srcVi.width;

        if (direction == TurnDirection::LEFT) {
            turnFunction_ = turnPlaneLeft;
        } else {
            turnFunction_ = ssse3 ? turnPlaneRightSSSE3 : turnPlaneRightSSE2;
        }
    } else {
        turnFunction_ = ssse3 ? turnPlane180SSSE3 : turnPlane180SSE2;
    }
}
开发者ID:tp7,项目名称:fturn,代码行数:30,代码来源:fturn.cpp

示例6: SupportFilter

	/// @brief Two-input support filter: combines _child and _secondclip.
	///
	/// The luma compute plane is taken as given; the chroma compute plane is
	/// chosen by decipherchromaargument() from the user's chroma argument.
	/// NOTE(review): the init list dereferences decipherchromaargument()'s
	/// return value unchecked — assumes it never returns null; confirm.
	///
	/// @throws via env->ThrowError when the two clips' dimensions differ.
	Binary::Binary(BinaryComputePlane *_computeplane, PClip _child, PClip _secondclip, AVSValue _chroma, IScriptEnvironment *env)
	: SupportFilter(_child, env), secondclip(_secondclip), 
	computeplane(*_computeplane),
	computechroma(  *(decipherchromaargument(_chroma, makevector(_child,_secondclip), *_computeplane) ) )
	{
		// Both clips must have identical geometry for per-pixel combination.
		if(_secondclip->GetVideoInfo().width != vi.width)
			env->ThrowError("LimitedSupport binary filter: widths do not match.");
		if(_secondclip->GetVideoInfo().height != vi.height)
			env->ThrowError("LimitedSupport binary filter: heights do not match.");
	}
开发者ID:alexeiemam,项目名称:avsScripts,代码行数:10,代码来源:PlanarFilter.cpp

示例7: GenericVideoFilter

/// @brief Mask-cleaning filter: removes connected components shorter than
/// `length` with values below `thresh`.
///
/// @param length minimum component length to keep (must be > 0)
/// @param thresh minimum pixel value considered part of the mask (must be > 0)
/// @throws via env->ThrowError on non-YV12 input or non-positive arguments
TMaskCleaner::TMaskCleaner(PClip child, int length, int thresh, IScriptEnvironment* env) : GenericVideoFilter(child), m_length(length), m_thresh(thresh), lookup(nullptr) {
    // Query the source geometry once instead of three separate calls.
    const VideoInfo& cvi = child->GetVideoInfo();
    // The check accepts only YV12; the previous message wrongly claimed
    // YV24 was also supported.
    if (!cvi.IsYV12()) {
        env->ThrowError("Only YV12 is supported!");
    }
    if (length <= 0 || thresh <= 0) {
        env->ThrowError("Invalid arguments!");
    }
    // One bit per luma pixel; the trailing () value-initializes the buffer
    // to zero so no stale data is read before the first pass fills it.
    lookup = new BYTE[cvi.height * cvi.width / 8]();
    m_w = cvi.width;
}
开发者ID:hetarenaossan,项目名称:tmaskcleaner,代码行数:10,代码来源:tmaskcleaner.cpp

示例8: GenericVideoFilter

FilteredResizeH::FilteredResizeH( PClip _child, double subrange_left, double subrange_width,
                                  int target_width, ResamplingFunction* func, IScriptEnvironment* env )
  : GenericVideoFilter(_child), tempY(0), tempUV(0),pattern_luma(0),pattern_chroma(0),
  CodeGenerator(false) //Josh: Codegenerator construtor takes arg x64
{
	try {	// HIDE DAMN SEH COMPILER BUG!!!
  pattern_luma = pattern_chroma = (int *)0;
  tempUV = tempY = 0;

  original_width = _child->GetVideoInfo().width;

  if (target_width<=0)
    env->ThrowError("Resize: Width must be greater than 0.");

  if (vi.IsYUV())
  {
    if ((target_width&1) && (vi.IsYUY2()))
      env->ThrowError("Resize: YUY2 width must be even");
    if ((target_width&1) && (vi.IsYV12()))
      env->ThrowError("Resize: YV12 width must be even.");

    tempY = (BYTE*) _aligned_malloc(original_width*2+4+32, 64);   // aligned for Athlon cache line
    tempUV = (BYTE*) _aligned_malloc(original_width*4+8+32, 64);  // aligned for Athlon cache line

    if (vi.IsYV12()) {
      pattern_chroma = GetResamplingPatternYUV( vi.width>>1, subrange_left/2.0, subrange_width/2.0,
        target_width>>1, func, true, tempY, env );
    } else {
      pattern_chroma = GetResamplingPatternYUV( vi.width>>1, subrange_left/2.0, subrange_width/2.0,
        target_width>>1, func, false, tempUV, env );
    }
    pattern_luma = GetResamplingPatternYUV(vi.width, subrange_left, subrange_width, target_width, func, true, tempY, env);
  }
开发者ID:Dias19,项目名称:avisynth64,代码行数:33,代码来源:resample.cpp

示例9: Create

/// Wraps planar clips in an AlignPlanar filter; packed-format clips are
/// returned unchanged since they need no plane alignment.
PClip AlignPlanar::Create(PClip clip) 
{
  // Guard-clause form: only planar input needs the wrapper.
  if (clip->GetVideoInfo().IsPlanar()) {
    return new AlignPlanar(clip);
  }
  return clip;
}
开发者ID:GDXN,项目名称:avxsynth,代码行数:8,代码来源:alignplanar.cpp

示例10: MVDegrainBase

/// @brief Multi-reference degrain filter built on MVDegrainBase.
///
/// mvMulti packs all motion-vector clips into one clip whose height encodes
/// the number of reference frames (height/2 backward + forward pairs), laid
/// out as BX, ..., B2, B1, F1, F2, ..., FX.
///
/// @param _RefFrames requested reference frames each side (clamped 1..32)
/// @param _PreFetch  frames decoded ahead (1..21; PreFetch*RefFrames <= 32)
/// @throws via env->ThrowError on out-of-range arguments
MVDegrainMulti::MVDegrainMulti(PClip _child, PClip mvMulti, int _RefFrames, int _thSAD, int _thSADC, int _YUVplanes, int _nLimit,
					          PClip _pelclip, int _nIdx, int _nSCD1, int _nSCD2, bool _mmx, bool _isse, int _MaxThreads,
                              int _PreFetch, int _SadMode, IScriptEnvironment* env) :
			    MVDegrainBase(_child, _RefFrames, _YUVplanes, _nLimit, _pelclip, _nIdx, _mmx, _isse, env, mvMulti, 
                              "MVDegrainMulti", 0, _MaxThreads, _PreFetch, _SadMode), RefFrames(_RefFrames)
{
    if (RefFrames<1 || RefFrames>32) env->ThrowError("MVDegrainMulti: refframes must be >=1 and <=32");

    // get the true number of reference frames
    // (the mvMulti clip's height is 2 * number of B/F pairs available)
    VideoInfo mvMultivi=mvMulti->GetVideoInfo();
    unsigned int RefFramesAvailable=mvMultivi.height/2;

    // if refframes is greater than MVAnalyseMulti height then limit to height
    if (RefFramesAvailable<RefFrames) {
        RefFrames=RefFramesAvailable;
        UpdateNumRefFrames(RefFrames, env);
    }

    // PreFetch max 21 since 21*3=63 and 64 is max threads at one time
    if (_PreFetch<1 || _PreFetch>21) env->ThrowError("MVDegrainMulti: PreFetch must be >=1 and <=21");

    if (_PreFetch*RefFrames>32) env->ThrowError("MVDegrainMulti: PreFetch*RefFrames<=32");

    // initialize MVClip's which are in order BX, ..., B3, B2, B1, F1, F2, F3, ..., FX in mvMulti
    // Forward clip RefNum sits at offset RefFramesAvailable+RefNum; backward
    // clips are mirrored just before the midpoint.
    for (unsigned int PreFetchNum=0; PreFetchNum<static_cast<unsigned int>(_PreFetch); ++PreFetchNum) {
        if (RefFrames<RefFramesAvailable) {
            // we are taking a subset of the mvMulti clip
            for(unsigned int RefNum=0; RefNum<RefFrames; ++RefNum) {
                pmvClipF[PreFetchNum][RefNum]=new MVClip(mvMulti, _nSCD1, _nSCD2, env, true, RefFramesAvailable+RefNum);  
                pmvClipB[PreFetchNum][RefNum]=new MVClip(mvMulti, _nSCD1, _nSCD2, env, true, RefFramesAvailable-RefNum-1);   
            }               
        }
        else {
            // we are taking the full mvMulti clip
            for(unsigned int RefNum=0; RefNum<RefFrames; ++RefNum) {
                pmvClipF[PreFetchNum][RefNum]=new MVClip(mvMulti, _nSCD1, _nSCD2, env, true, RefFrames+RefNum);  
                pmvClipB[PreFetchNum][RefNum]=new MVClip(mvMulti, _nSCD1, _nSCD2, env, true, RefFrames-RefNum-1);   
            }
        }
    }

    // check similarities
    CheckSimilarity(*pmvClipF[0][0], "mvMulti", env); // only need to check one since they are grouped together

    // normalize thSAD
    thSAD  = _thSAD*pmvClipB[0][0]->GetThSCD1()/_nSCD1; // normalize to block SAD
    thSADC = _thSADC*pmvClipB[0][0]->GetThSCD1()/_nSCD1; // chroma

    // find the maximum extent
    // (largest delta frame used by either direction's furthest reference)
    unsigned int MaxDelta=static_cast<unsigned int>(pmvClipF[0][RefFrames-1]->GetDeltaFrame());
    if (static_cast<unsigned int>(pmvClipB[0][RefFrames-1]->GetDeltaFrame())>MaxDelta)
        MaxDelta=static_cast<unsigned int>(pmvClipB[0][RefFrames-1]->GetDeltaFrame());

    // numframes 2*MaxDelta+1, i.e. to cover all possible frames in sliding window
    mvCore->AddFrames(nIdx, (2*MaxDelta)*_PreFetch+1, pmvClipB[0][0]->GetLevelCount(), nWidth, nHeight, nPel, nHPadding, nVPadding, 
                      YUVPLANES, _isse, yRatioUV);
}
开发者ID:alexeiemam,项目名称:avsScripts,代码行数:57,代码来源:MVDegrainMulti.cpp

示例11:

	/// @brief Filter computing a factor-scaled color difference.
	///
	/// Requires RGB32 input. The optional subtrahend clip, when supplied,
	/// must be compatible with the main input (validated by CheckVideoInfo).
	AdjustedColorDifference::AdjustedColorDifference(IScriptEnvironment* env, PClip input, double factor, PClip subtrahend /* optional */) :
		GenericVideoFilter(input), m_factor(factor), m_subtrahend(subtrahend)
	{
		// Only 32-bit RGB frames are handled by this plugin.
		if (!vi.IsRGB32())
			env->ThrowError("plugin supports only RGB32 input");
		// Validate the optional second clip against the main input.
		if (subtrahend != nullptr) {
			auto subtrahendInfo = subtrahend->GetVideoInfo();
			CheckVideoInfo(env, vi, subtrahendInfo);
		}
	}
开发者ID:slavanap,项目名称:ssifSource,代码行数:10,代码来源:Filter.AdjustedColorDifference.cpp

示例12: Q_UNUSED

/// @brief AviSynth factory: paints named vector elements over a background.
///
/// args layout: [0] background clip, [1] array of element name strings.
/// Renders all elements into one ARGB image and overlays it on the
/// background via RgbOverlay.
///
/// @throws via env->ThrowError when an element name is unknown
AVSValue __cdecl StillImage::CreateElements(AVSValue args, void* user_data, IScriptEnvironment* env)
{
    Q_UNUSED(user_data)
    const PClip background = args[0].AsClip();
    const VideoInfo backgroundVI = background->GetVideoInfo();
    const AVSValue &elementValues = args[1];
    QStringList elements;
    for (int i = 0; i < elementValues.ArraySize(); ++i) {
        const QLatin1String element(elementValues[i].AsString());
        // BUG FIX: the original condition was inverted — it threw
        // "Invalid element" for every element that WAS available and
        // silently accepted unknown ones. Reject only unavailable names.
        if (!Filters::elementAvailable(element))
            env->ThrowError("QtAviSynthElements: Invalid element '%s'.", element.latin1());
        elements.append(element);
    }
    // Paint all requested elements onto one transparent ARGB canvas.
    QImage image(backgroundVI.width, backgroundVI.height, QImage::Format_ARGB32);
    image.fill(Tools::transparentColor);
    QPainter p(&image);
    Filters::paintElements(&p, elements, image.rect());
    const PClip elementsClip = new StillImage(backgroundVI, image, env);
    return new RgbOverlay(background, elementsClip, env);
}
开发者ID:aportale,项目名称:qtorials,代码行数:20,代码来源:stillimage.cpp

示例13: Create_AutoTrace

AVSValue __cdecl Create_AutoTrace(AVSValue args, void* user_data, IScriptEnvironment* env) {
	PClip clip = args[0].AsClip();
	const VideoInfo& vi = clip->GetVideoInfo();
	if (vi.IsRGB24()) {
		at_fitting_opts_type* fitting_opts = at_fitting_opts_new();
		// Setting fitting opts based on input
		fitting_opts->color_count = args[3].AsInt(0);
		int destWidth = args[1].AsInt(0);
		int destHeight = args[2].AsInt(0);
		// If the inputs are left off entirely (or 0 or negative), then use the
		// input size. If either one is left off (or 0 or negative), then
		// determine that one based on presevering the aspect ratio of the
		// given value.
		if (destWidth <= 0) {
			if (destHeight <= 0) {
				destWidth = vi.width;
				destHeight = vi.height;
			} else {
				// Calculate width based off desired height
				destWidth = destHeight * vi.width / vi.height;
			}
		} else if (destHeight <= 0) {
			// Calculate height based off desired width
			destHeight = destWidth * vi.height / vi.width;
		}
		if (args[4].Defined()) {
			// background_color
			int background = args[4].AsInt();
			if (background != -1) {
				// To match the documentation, ignore -1, even though it would
				// be a valid color. (And argueably makes more sense than
				// 0xFFFFFF, as it has the alpha channel set to full.)
				// Note that R and B are swapped. This is by design - rather
				// than convert a BGR image into an RGB image as AutoTrace
				// expects, we just let the B and R channels be "backwards" as
				// within AutoTrace.
				fitting_opts->background_color = at_color_new(
					(background & 0x0000FF),
					(background & 0x00FF00) >> 8,
					(background & 0xFF0000) >> 16);
			}
开发者ID:Xenoveritas,项目名称:AviSynth-Stuff,代码行数:41,代码来源:AutoTraceFilter.cpp

示例14: strcpy

/// @brief Read from environment
/// @param _clip
///
void AvisynthAudioProvider::LoadFromClip(AVSValue _clip) {
	AVSValue script;

	// Check if it has audio
	VideoInfo vi = _clip.AsClip()->GetVideoInfo();
	if (!vi.HasAudio()) throw agi::AudioDataNotFoundError("No audio found.", 0);

	IScriptEnvironment *env = avs_wrapper.GetEnv();

	// Convert to one channel
	char buffer[1024];
	strcpy(buffer,lagi_wxString(OPT_GET("Audio/Downmixer")->GetString()).mb_str(csConvLocal));
	script = env->Invoke(buffer, _clip);

	// Convert to 16 bits per sample
	script = env->Invoke("ConvertAudioTo16bit", script);
	vi = script.AsClip()->GetVideoInfo();

	// Convert sample rate
	int setsample = OPT_GET("Provider/Audio/AVS/Sample Rate")->GetInt();
	if (vi.SamplesPerSecond() < 32000) setsample = 44100;
	if (setsample != 0) {
		AVSValue args[2] = { script, setsample };
		script = env->Invoke("ResampleAudio", AVSValue(args,2));
	}

	// Set clip
	PClip tempclip = script.AsClip();
	vi = tempclip->GetVideoInfo();

	// Read properties
	channels = vi.AudioChannels();
	num_samples = vi.num_audio_samples;
	sample_rate = vi.SamplesPerSecond();
	bytes_per_sample = vi.BytesPerAudioSample();
	float_samples = false;

	clip = tempclip;
}
开发者ID:sthenc,项目名称:Aegisub,代码行数:42,代码来源:audio_provider_avs.cpp

示例15: ColorQuantize

    /// @brief Color-quantization filter constructor.
    ///
    /// Ensures the source is RGB24 (converting if necessary). When
    /// useGlobalPalette is set, stacks every frame into one tall image,
    /// quantizes it once with FreeImage, and keeps the resulting palette
    /// (optionally writing it to globalPaletteOutputFile).
    ///
    /// @param paletteSize number of palette entries to quantize to
    /// @param algorithm   FreeImage quantization algorithm
    ColorQuantize(PClip originClip, int paletteSize,
                  bool useGlobalPalette, FREE_IMAGE_QUANTIZE algorithm,
                  const char *globalPaletteOutputFile, IScriptEnvironment* env)
        : m_origin(originClip)
        , m_paletteSize(paletteSize)
        , m_useGlobalPalette(useGlobalPalette)
        , m_algorithm(algorithm)
        , m_targetVideoInfo(originClip->GetVideoInfo())
        , m_globalPalette(0)
    {
        // Quantization operates on RGB24; convert anything else up front.
        if (!originClip->GetVideoInfo().IsRGB24()) {
            m_originRgb = env->Invoke("ConvertToRgb24", originClip).AsClip();
            m_targetVideoInfo.pixel_type = VideoInfo::CS_BGR24;
        } else {
            m_originRgb = originClip;
        }

        if (m_useGlobalPalette) {
            // Stack all frames vertically so one quantization pass sees
            // the whole clip's color distribution.
            FIBITMAP *hugeImage =
                    FreeImage_Allocate(m_targetVideoInfo.width,
                                       m_targetVideoInfo.height * m_targetVideoInfo.num_frames,
                                       24);
            for (int frame = 0; frame < m_targetVideoInfo.num_frames; ++frame) {
                // BUG FIX: the original fetched the frame and then called
                // GetFrame a second time inside the copy call, decoding
                // every frame twice. Reuse the already-fetched frame.
                const PVideoFrame videoFrame = m_originRgb->GetFrame(frame, env);
                copyVideoFrameToImage(videoFrame, hugeImage, frame * m_targetVideoInfo.height);
            }
            FIBITMAP *quantizedImage =
                    FreeImage_ColorQuantizeEx(hugeImage, algorithm, m_paletteSize);
            FreeImage_Unload(hugeImage);
            // Keep a private copy of the palette; the quantized image is
            // only needed as a palette carrier.
            m_globalPalette = new RGBQUAD[m_paletteSize];
            memcpy(m_globalPalette, FreeImage_GetPalette(quantizedImage), m_paletteSize * sizeof(RGBQUAD));
            FreeImage_Unload(quantizedImage);
            if (globalPaletteOutputFile)
                savePaletteImage(globalPaletteOutputFile, m_globalPalette, m_paletteSize);
        }
    }
开发者ID:aportale,项目名称:qtorials,代码行数:36,代码来源:colorquantize.cpp


注:本文中的PClip类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。