This article collects typical usage examples of the C++ method ofImage::allocate. If you are wondering what ofImage::allocate does, how to call it, or what it looks like in real code, the selected examples below may help. You can also read further about the ofImage class the method belongs to.
The following shows 15 code examples of ofImage::allocate, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code examples.
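Before the examples, here is a minimal sketch of the allocate-then-fill pattern most of them follow. It assumes the legacy openFrameworks pixel API in which ofImage::getPixels() returns an unsigned char* (as in the examples below; recent versions return an ofPixels& instead), and the class and member names ofApp and img are illustrative:

// Minimal sketch: allocate a grayscale ofImage, write its pixels by hand, then upload them.
void ofApp::setup(){
    int w = 320;
    int h = 240;
    img.allocate(w, h, OF_IMAGE_GRAYSCALE);     // reserves w*h bytes of pixel memory

    unsigned char* pixels = img.getPixels();    // raw pixel buffer, row-major
    for(int y = 0; y < h; y++){
        for(int x = 0; x < w; x++){
            pixels[y*w + x] = (x + y) % 256;    // simple diagonal gradient
        }
    }
    img.update();                               // push the modified pixels to the texture
}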
Example 1: setup
//--------------------------------------------------------------
void testApp::setup(){
    ofSetFrameRate(60);
    img.allocate(600, 200, OF_IMAGE_GRAYSCALE);
    img2.allocate(600, 200, OF_IMAGE_GRAYSCALE);
    img3.allocate(600, 200, OF_IMAGE_COLOR);

    unsigned char * pixels = img.getPixels();
    for(int x = 0; x < 600; ++x){
        for(int y = 0; y < 200; ++y){
            // map noise (0..1) to an 8-bit brightness value
            unsigned char brightness = ofMap(ofNoise(x*faktor, y*faktor), 0, 1, 0, 255);
            pixels[y*600 + x] = brightness;
        }
    }
    img.update();
}
Example 2: keyPressed
//--------------------------------------------------------------
void testApp::keyPressed (int key){
    if(key == 'e'){
        mode = "edit";
        ofSetWindowShape(1280, 900);
        reSort();
    }
    if(key == 'p'){
        //startRender();
        mode = "play-back";
        whichVideo = 0;
        totalFrames = 0;
        framecounter = 0;
        nextVideo();
        ofSetWindowShape(1024, 576);
        img.setUseTexture(false);
        img.allocate(1024, 576, OF_IMAGE_COLOR);
    }
    if(key == 'P'){
        //startRender();
        mode = "play";
        whichVideo = 0;
        totalFrames = 0;
        framecounter = 0;
        nextVideo();
        ofSetWindowShape(1024, 576);
        img.setUseTexture(false);
        img.allocate(1024, 576, OF_IMAGE_COLOR);
    }
    if(key == 'r'){
        mode = "edit";
        cout << "random shuffle" << endl;
        ofRandomize(thumbs);
    }
    if(key == 'l'){
        loadOrder();
    }
    if(key == 'm'){
        mode = "move";
        ofSetWindowShape(1280, 900);
        reSort();
    }
}
Example 3: setup
void ofApp::setup()
{
    ofSetFrameRate(60);
    ofSetVerticalSync(true);
    ofSetWindowShape(w*2, h);
    synth.allocate(w, h, OF_IMAGE_GRAYSCALE);
    loadImage("1.jpg");
}
Example 4: normalizeImage
void testApp::normalizeImage(ofImage& img, ofImage& normalized) {
    srcTracker.update(toCv(img));
    if(srcTracker.getFound()) {
        drawNormalized(srcTracker, img, srcNormalized);
        normalized.allocate(normalizedWidth, normalizedHeight, OF_IMAGE_COLOR);
        srcNormalized.readToPixels(normalized.getPixelsRef());
        normalized.update();
    } else {
        ofLogWarning() << "couldn't find the face" << endl;
    }
}
Example 5: grabStill
void ofxLibdc::grabStill(ofImage& img) {
    setTransmit(false);
    flushBuffer();
    dc1394_video_set_one_shot(camera, DC1394_ON);
    // if possible, the following should be replaced with a call to grabFrame
    dc1394video_frame_t *frame;
    dc1394_capture_dequeue(camera, capturePolicy, &frame);
    img.allocate(width, height, imageType);
    if(imageType == OF_IMAGE_GRAYSCALE) {
        memcpy(img.getPixels(), frame->image, width * height);
    } else if(imageType == OF_IMAGE_COLOR) {
        // color conversion is not implemented in this path
    }
    dc1394_capture_enqueue(camera, frame);
}
Example 6: grabVideo
bool ofxLibdc::grabVideo(ofImage& img, bool dropFrames) {
    setTransmit(true);
    img.allocate(width, height, imageType);
    if(dropFrames) {
        bool remaining;
        int i = 0;
        do {
            remaining = grabFrame(img);
            if(!remaining && i == 0)
                return false;
            i++;
        } while(remaining);
        return true;
    } else {
        return grabFrame(img);
    }
}
Example 7: ofMap
void ofxDepthImageCompressor::convertTo8BitImage(unsigned short* buf, ofImage& image){
    int nearPlane = 500;
    int farPlane = 7000;
    if(!image.isAllocated()){
        image.allocate(640, 480, OF_IMAGE_GRAYSCALE);
    }
    unsigned char* pix = image.getPixels();
    int stride = image.getPixelsRef().getNumChannels();
    for(int i = 0; i < 640*480; i++){
        //ofMap(buf[i], nearPlane, farPlane, 255, 0, true);
        unsigned char value = buf[i] == 0 ? 0 : 255 - (255 * (buf[i] - nearPlane)) / farPlane; // + ofMap(buf[i], nearPlane, farPlane, 255, 0, true);
        for(int c = 0; c < stride; c++){
            pix[i*stride + c] = value;
        }
    }
    image.update();
}
Example 8: setup
void ofApp::setup() {
    num = 512;
    int n = num * num;
    float scale = .009;
    float base = .008;
    patterns.clear();
    for(int i = 0; i < 6; i++) {
        int baseSize = (int) powf(2.5, i);
        patterns.push_back(TuringPattern(num, baseSize, baseSize * 2, log(baseSize) * scale + base));
    }
    grid.resize(n);
    for(int i = 0; i < n; i++) {
        grid[i] = ofRandom(-1, 1);
    }
    buffer.allocate(num, num, OF_IMAGE_GRAYSCALE);
}
Example 9: setup
//--------------------------------------------------------------
void ofApp::setup(){
    ofSetFrameRate(15);
    ofBackground(255);
    camWidth = 640;  // try to grab at this size.
    camHeight = 480;
    currentFrame.allocate(camWidth, camHeight, OF_IMAGE_GRAYSCALE);
    vidGrabber.setVerbose(true);
    vidGrabber.initGrabber(camWidth, camHeight);
    font.loadFont("Courier New Bold.ttf", 9);
    ofEnableAlphaBlending();
}
Example 10: setup
void setup() {
    ofSetVerticalSync(true);
    ofBackground(0);
    config = ofLoadJson("../../../SharedData/shared/config.json");
    float camWidth = config["camera"]["width"];
    float camHeight = config["camera"]["height"];
    float camFrameRate = config["camera"]["framerate"];
    device = config["camera"]["device"];
    if (device == "blackmagic") {
        cam = &blackmagicGrabber;
    } else {
        cam = &videoGrabber;
    }
    cam->setDesiredFrameRate(camFrameRate);
    cam->setup(camWidth, camHeight);
    clipping.allocate(camWidth, camHeight, OF_IMAGE_COLOR_ALPHA);
    toggleGrayscale = false;
    updateWindowShape();
}
Example 11: getOneShot
void ofxLibdc::getOneShot(ofImage& img) {
    setTransmit(false);
    flush();
    dc1394_video_set_one_shot(camera, DC1394_ON);
    dc1394video_frame_t *frame;
    dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, &frame);
    img.allocate(width, height, imageType);
    if(imageType == OF_IMAGE_GRAYSCALE) {
        memcpy(img.getPixels(), frame->image, width * height);
    } else if(imageType == OF_IMAGE_COLOR) {
        // this shouldn't be reallocated every frame!
        dc1394video_frame_t* rgbFrame = (dc1394video_frame_t*) calloc(1, sizeof(dc1394video_frame_t));
        rgbFrame->color_coding = DC1394_COLOR_CODING_RGB8;
        dc1394_convert_frames(frame, rgbFrame);
        memcpy(img.getPixels(), rgbFrame->image, 3 * width * height);
        free(rgbFrame);
    }
    // img.setFromPixels(frame->image, width, height, imageType); // redundant: the pixels were
    // already copied above, and for color images this would overwrite the converted RGB data
    dc1394_capture_enqueue(camera, frame);
}
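The "shouldn't be reallocated every frame" note above can be addressed by keeping the conversion frame alive between calls. A minimal sketch of that idea, assuming an added class member dc1394video_frame_t* rgbFrame initialized to NULL (the member name and the teardown placement are illustrative, not part of ofxLibdc) and assuming dc1394_convert_frames manages the destination image buffer, as the original code already relies on:

// lazily create the RGB conversion frame once, then reuse it for every shot
if(rgbFrame == NULL) {
    rgbFrame = (dc1394video_frame_t*) calloc(1, sizeof(dc1394video_frame_t));
    rgbFrame->color_coding = DC1394_COLOR_CODING_RGB8;
}
dc1394_convert_frames(frame, rgbFrame);
memcpy(img.getPixels(), rgbFrame->image, 3 * width * height);
// on teardown (e.g. in the destructor): free rgbFrame->image if it was allocated, then free(rgbFrame)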
Example 12: resize
// this could also be done with OpenCV, cvResize + CV_INTER_NN
// or even faster by precomputing a remap function
void threadedScanLoader::resize(ofImage& from, ofImage& to, int toWidth, int toHeight) {
    to.allocate(toWidth, toHeight, OF_IMAGE_COLOR_ALPHA);
    unsigned char* fromPixels = from.getPixels();
    unsigned char* toPixels = to.getPixels();
    int toPosition = 0;
    int fromWidth = from.getWidth();
    int fromHeight = from.getHeight();
    for(int toy = 0; toy < toHeight; toy++) {
        int fromy = (toy * fromHeight) / toHeight;
        int fromPosition = fromy * fromWidth;
        for(int tox = 0; tox < toWidth; tox++) {
            int fromx = (tox * fromWidth) / toWidth;
            int cur = (fromPosition + fromx) * 4;
            toPixels[toPosition++] = fromPixels[cur + 0];
            toPixels[toPosition++] = fromPixels[cur + 1];
            toPixels[toPosition++] = fromPixels[cur + 2];
            toPixels[toPosition++] = fromPixels[cur + 3];
        }
    }
}
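As the comment above this function suggests, the nearest-neighbor mapping can be precomputed once and reused, removing the per-pixel divisions from the inner loop. A minimal sketch of that idea (the helper names buildNearestNeighborMap and resizeWithMap are illustrative, not part of threadedScanLoader):

#include <vector>

// precompute, for every destination pixel, the byte offset of its nearest-neighbor
// source pixel (RGBA, 4 bytes per pixel); build once per source/destination size pair
static std::vector<int> buildNearestNeighborMap(int fromWidth, int fromHeight,
                                                int toWidth, int toHeight) {
    std::vector<int> map(toWidth * toHeight);
    int i = 0;
    for(int toy = 0; toy < toHeight; toy++) {
        int fromy = (toy * fromHeight) / toHeight;
        for(int tox = 0; tox < toWidth; tox++) {
            int fromx = (tox * fromWidth) / toWidth;
            map[i++] = (fromy * fromWidth + fromx) * 4;
        }
    }
    return map;
}

// the per-frame resize then becomes a straight table lookup, with no divisions
static void resizeWithMap(const unsigned char* fromPixels, unsigned char* toPixels,
                          const std::vector<int>& map) {
    int toPosition = 0;
    for(size_t i = 0; i < map.size(); i++) {
        int cur = map[i];
        toPixels[toPosition++] = fromPixels[cur + 0];
        toPixels[toPosition++] = fromPixels[cur + 1];
        toPixels[toPosition++] = fromPixels[cur + 2];
        toPixels[toPosition++] = fromPixels[cur + 3];
    }
}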
Example 13: faceColorToTexture
void faceColorToTexture(ofMesh& mesh, ofImage& image)
{
    vector<ofFloatColor> &color = mesh.getColors();
    int num_face = color.size() / 3;

    // smallest power-of-two texture that holds one texel per face
    int tex_size = ofNextPow2(ceil(sqrt(num_face)));

    bool arb = ofGetUsingArbTex();
    ofDisableArbTex();
    image.allocate(tex_size, tex_size, OF_IMAGE_COLOR);
    if (arb) ofEnableArbTex();

    mesh.clearTexCoords();
    image.getPixelsRef().set(0);

    // half a texel in normalized coordinates, used to sample texel centers
    float texel_size = (1. / image.getWidth()) * 0.5;

    for (int i = 0; i < num_face; i++)
    {
        int u = (i % tex_size);
        int v = (i / tex_size);
        ofColor c = color[i * 3];
        image.setColor(u, v, c);
        float uu = (float)u / image.getWidth() + texel_size;
        float vv = (float)v / image.getHeight() + texel_size;
        // same texture coordinate for all three vertices of the face
        mesh.addTexCoord(ofVec2f(uu, vv));
        mesh.addTexCoord(ofVec2f(uu, vv));
        mesh.addTexCoord(ofVec2f(uu, vv));
    }

    image.update();
    mesh.clearColors();
}
Example 14: getClipping
//--------------------------------------------------------------
void captureApp::getClipping(ofImage& img, ofImage& clipping) {
    int w = img.getWidth();
    int h = img.getHeight();
    clipping.allocate(w, h, OF_IMAGE_COLOR_ALPHA);
    unsigned char* imgPixels = img.getPixels();
    unsigned char* clippingPixels = clipping.getPixels();
    int n = w * h;
    for(int i = 0; i < n; i++) {
        // mark pixels where any channel is fully black or fully saturated
        if(imgPixels[i*3] == 0 || imgPixels[i*3] == 255 ||
           imgPixels[i*3+1] == 0 || imgPixels[i*3+1] == 255 ||
           imgPixels[i*3+2] == 0 || imgPixels[i*3+2] == 255) {
            clippingPixels[i * 4 + 0] = 255;
            clippingPixels[i * 4 + 1] = 255;
            clippingPixels[i * 4 + 2] = 255;
            clippingPixels[i * 4 + 3] = 255;
        } else {
            clippingPixels[i * 4 + 0] = 0;
            clippingPixels[i * 4 + 1] = 0;
            clippingPixels[i * 4 + 2] = 0;
            clippingPixels[i * 4 + 3] = 0;
        }
    }
}
Example 15: resolutionChanged
// ---------------------------------------------------------------------------------------
//
void resolutionChanged( int &_res )
{
    destImage.allocate( _res, _res, OF_IMAGE_COLOR );
}