This article collects typical usage examples of the C++ BL::Object class. If you are wondering what the BL::Object class is used for, or how it is used in practice, the selected class code examples below may help.
A total of 15 code examples of the Object class are shown below, sorted by popularity by default.
Example 1: object_is_mesh
bool BlenderSync::object_is_mesh(BL::Object& b_ob)
{
    BL::ID b_ob_data = b_ob.data();

    if(!b_ob_data) {
        return false;
    }

    if(b_ob.type() == BL::Object::type_CURVE) {
        /* Skip exporting curves without faces, overhead can be
         * significant if there are many for path animation. */
        BL::Curve b_curve(b_ob.data());

        return (b_curve.bevel_object() ||
                b_curve.extrude() != 0.0f ||
                b_curve.bevel_depth() != 0.0f ||
                b_curve.dimensions() == BL::Curve::dimensions_2D ||
                b_ob.modifiers.length());
    }
    else {
        return (b_ob_data.is_a(&RNA_Mesh) ||
                b_ob_data.is_a(&RNA_Curve) ||
                b_ob_data.is_a(&RNA_MetaBall));
    }
}
Example 2: object_render_hide
static bool object_render_hide(BL::Object b_ob, bool top_level, bool parent_hide, bool& hide_triangles)
{
    /* check if we should render or hide particle emitter */
    BL::Object::particle_systems_iterator b_psys;

    bool hair_present = false;
    bool show_emitter = false;
    bool hide = false;

    for(b_ob.particle_systems.begin(b_psys); b_psys != b_ob.particle_systems.end(); ++b_psys) {
        if((b_psys->settings().render_type() == BL::ParticleSettings::render_type_PATH) &&
           (b_psys->settings().type() == BL::ParticleSettings::type_HAIR))
            hair_present = true;

        if(b_psys->settings().use_render_emitter()) {
            hide = false;
            show_emitter = true;
        }
    }

    /* duplicators hidden by default, except dupliframes which duplicate self */
    if(b_ob.is_duplicator())
        if(top_level || b_ob.dupli_type() != BL::Object::dupli_type_FRAMES)
            hide = true;

    /* hide original object for duplis */
    BL::Object parent = b_ob.parent();
    if(parent && object_render_hide_original(parent.dupli_type()))
        if(parent_hide)
            hide = true;

    hide_triangles = (hair_present && !show_emitter);
    return hide && !show_emitter;
}
Example 3: sync_camera_motion
void BlenderSync::sync_camera_motion(BL::RenderSettings& b_render,
                                     BL::Object& b_ob,
                                     int width, int height,
                                     float motion_time)
{
    if(!b_ob)
        return;

    Camera *cam = scene->camera;
    BL::Array<float, 16> b_ob_matrix;
    b_engine.camera_model_matrix(b_ob, b_ob_matrix);
    Transform tfm = get_transform(b_ob_matrix);
    tfm = blender_camera_matrix(tfm, cam->type, cam->panorama_type);

    if(tfm != cam->matrix) {
        VLOG(1) << "Camera " << b_ob.name() << " motion detected.";
        if(motion_time == -1.0f) {
            cam->motion.pre = tfm;
            cam->use_motion = true;
        }
        else if(motion_time == 1.0f) {
            cam->motion.post = tfm;
            cam->use_motion = true;
        }
    }

    if(cam->type == CAMERA_PERSPECTIVE) {
        BlenderCamera bcam;
        float aspectratio, sensor_size;
        blender_camera_init(&bcam, b_render);
        blender_camera_from_object(&bcam, b_engine, b_ob);
        blender_camera_viewplane(&bcam,
                                 width, height,
                                 NULL,
                                 &aspectratio,
                                 &sensor_size);
        /* TODO(sergey): De-duplicate calculation with camera sync. */
        float fov = 2.0f * atanf((0.5f * sensor_size) / bcam.lens / aspectratio);
        if(fov != cam->fov) {
            VLOG(1) << "Camera " << b_ob.name() << " FOV change detected.";
            if(motion_time == -1.0f) {
                cam->fov_pre = fov;
                cam->use_perspective_motion = true;
            }
            else if(motion_time == 1.0f) {
                cam->fov_post = fov;
                cam->use_perspective_motion = true;
            }
        }
    }
}
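The FOV computed in Example 3 depends only on the sensor size, the focal length and the aspect-ratio correction. As a minimal standalone sketch of that same formula (plain C++ with made-up values, not the actual BlenderCamera fields):

#include <cmath>
#include <cstdio>

/* Standalone sketch: perspective field of view from sensor size, focal
 * length and viewplane aspect correction, mirroring the formula above.
 * The values are placeholders, not taken from a real camera. */
int main()
{
    const float sensor_size = 36.0f;   /* horizontal sensor size, in mm */
    const float lens = 50.0f;          /* focal length, in mm */
    const float aspectratio = 1.0f;    /* viewplane aspect correction */

    float fov = 2.0f * std::atan((0.5f * sensor_size) / lens / aspectratio);
    printf("fov = %f rad (%f deg)\n", fov, fov * 180.0f / 3.14159265f);
    return 0;
}

With a 36 mm sensor and a 50 mm lens this prints roughly 0.69 rad, about 39.6 degrees.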
Example 4: object_is_mesh
bool BlenderSync::object_is_mesh(BL::Object& b_ob)
{
    BL::ID b_ob_data = b_ob.data();

    return (b_ob_data && (b_ob_data.is_a(&RNA_Mesh) ||
            b_ob_data.is_a(&RNA_Curve) || b_ob_data.is_a(&RNA_MetaBall)));
}
Example 5: BKE_object_is_modified
CCL_NAMESPACE_BEGIN

/* Utilities */

bool BlenderSync::BKE_object_is_modified(BL::Object& b_ob)
{
    /* test if we can instance or if the object is modified */
    if(b_ob.type() == BL::Object::type_META) {
        /* multi-user and dupli metaballs are fused, can't instance */
        return true;
    }
    else if(ccl::BKE_object_is_modified(b_ob, b_scene, preview)) {
        /* modifiers */
        return true;
    }
    else {
        /* object level material links */
        BL::Object::material_slots_iterator slot;

        for(b_ob.material_slots.begin(slot); slot != b_ob.material_slots.end(); ++slot)
            if(slot->link() == BL::MaterialSlot::link_OBJECT)
                return true;
    }

    return false;
}
Example 6: bake
void BlenderSession::bake(BL::Object b_object, const string& pass_type, BL::BakePixel pixel_array, int num_pixels, int depth, float result[])
{
    ShaderEvalType shader_type = get_shader_type(pass_type);
    size_t object_index = ~0;
    int tri_offset = 0;

    if(shader_type == SHADER_EVAL_UV) {
        /* force UV to be available */
        Pass::add(PASS_UV, scene->film->passes);
    }

    if(is_light_pass(shader_type)) {
        /* force use_light_pass to be true */
        Pass::add(PASS_LIGHT, scene->film->passes);
    }

    /* create device and update scene */
    scene->film->tag_update(scene);
    scene->integrator->tag_update(scene);

    /* update scene */
    sync->sync_camera(b_render, b_engine.camera_override(), width, height);
    sync->sync_data(b_v3d, b_engine.camera_override(), &python_thread_state);

    /* get buffer parameters */
    SessionParams session_params = BlenderSync::get_session_params(b_engine, b_userpref, b_scene, background);
    BufferParams buffer_params = BlenderSync::get_buffer_params(b_render, b_scene, b_v3d, b_rv3d, scene->camera, width, height);

    /* set number of samples */
    session->tile_manager.set_samples(session_params.samples);
    session->reset(buffer_params, session_params.samples);
    session->update_scene();

    /* find object index. todo: is arbitrary - copied from mesh_displace.cpp */
    for(size_t i = 0; i < scene->objects.size(); i++) {
        if(strcmp(scene->objects[i]->name.c_str(), b_object.name().c_str()) == 0) {
            object_index = i;
            tri_offset = scene->objects[i]->mesh->tri_offset;
            break;
        }
    }

    /* when used, non-instanced convention: object = ~object */
    int object = ~object_index;

    BakeData *bake_data = scene->bake_init(object, tri_offset, num_pixels);

    populate_bake_data(bake_data, pixel_array, num_pixels);

    scene->bake(shader_type, bake_data, result);

    /* free all memory used (host and device), so we wouldn't leave render
     * engine with extra memory allocated
     */
    session->device_free();

    delete sync;
    sync = NULL;
}
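The `int object = ~object_index;` line in Example 6 relies on the convention noted in the comment: a non-instanced object is passed as the bitwise complement of its index, which the receiver can undo by complementing again. A tiny sketch of that encoding, with illustrative names only, not the Cycles bake API:

#include <cassert>
#include <cstddef>
#include <cstdio>

int main()
{
    size_t object_index = 5;
    int object = ~static_cast<int>(object_index);   /* encode: ~5 == -6 */
    assert(object < 0);                             /* complements are negative, unlike instance ids */

    size_t decoded = static_cast<size_t>(~object);  /* decode: ~(-6) == 5 */
    assert(decoded == object_index);

    printf("encoded %d, decoded %zu\n", object, decoded);
    return 0;
}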
Example 7: sync_object
void BlenderSync::sync_object(BL::Object b_parent, int b_index, BL::Object b_ob, Transform& tfm, uint visibility)
{
    /* light is handled separately */
    if(object_is_light(b_ob)) {
        sync_light(b_parent, b_index, b_ob, tfm);
        return;
    }

    /* only interested in object that we can create meshes from */
    if(!object_is_mesh(b_ob))
        return;

    /* test if we need to sync */
    ObjectKey key(b_parent, b_index, b_ob);
    Object *object;
    bool object_updated = false;

    if(object_map.sync(&object, b_ob, b_parent, key))
        object_updated = true;

    /* mesh sync */
    object->mesh = sync_mesh(b_ob, object_updated);

    /* object sync */
    if(object_updated || (object->mesh && object->mesh->need_update)) {
        object->name = b_ob.name().c_str();
        object->tfm = tfm;

        object->visibility = object_ray_visibility(b_ob) & visibility;
        if(b_parent.ptr.data != b_ob.ptr.data)
            object->visibility &= object_ray_visibility(b_parent);

        object->tag_update(scene);
    }
}
Example 8: sync_object
void BlenderSync::sync_object(BL::Object b_parent, int b_index, BL::Object b_ob, Transform& tfm, uint layer_flag)
{
    /* light is handled separately */
    if(object_is_light(b_ob)) {
        sync_light(b_parent, b_index, b_ob, tfm);
        return;
    }

    /* only interested in object that we can create meshes from */
    if(!object_is_mesh(b_ob))
        return;

    /* test if we need to sync */
    ObjectKey key(b_parent, b_index, b_ob);
    Object *object;
    bool object_updated = false;

    if(object_map.sync(&object, b_ob, b_parent, key))
        object_updated = true;

    /* holdout? */
    bool holdout = (layer_flag & render_layer.holdout_layer) != 0;

    /* mesh sync */
    object->mesh = sync_mesh(b_ob, holdout, object_updated);

    /* object sync */
    if(object_updated || (object->mesh && object->mesh->need_update)) {
        object->name = b_ob.name().c_str();
        object->pass_id = b_ob.pass_index();
        object->tfm = tfm;

        /* visibility flags for both parent and child */
        object->visibility = object_ray_visibility(b_ob) & PATH_RAY_ALL;
        if(b_parent.ptr.data != b_ob.ptr.data)
            object->visibility &= object_ray_visibility(b_parent);

        /* camera flag is not actually used, instead is tested
         * against render layer flags */
        if(object->visibility & PATH_RAY_CAMERA) {
            object->visibility |= layer_flag << PATH_RAY_LAYER_SHIFT;
            object->visibility &= ~PATH_RAY_CAMERA;
        }

        object->tag_update(scene);
    }
}
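In Example 8 the camera visibility bit is replaced by render-layer flags shifted into the upper part of the visibility mask. A small standalone sketch of that packing; the bit layout below is illustrative, not the real PATH_RAY_* values:

#include <cstdint>
#include <cstdio>

/* Illustrative mask layout: low bits are ray-type visibility, layer flags
 * are packed above them. */
enum : uint32_t {
    RAY_CAMERA      = 1u << 0,
    RAY_ALL         = 0xFFu,
    RAY_LAYER_SHIFT = 8,
};

int main()
{
    uint32_t visibility = RAY_ALL;   /* visible to all ray types */
    uint32_t layer_flag = 0x3;       /* object is on layers 0 and 1 */

    if(visibility & RAY_CAMERA) {
        visibility |= layer_flag << RAY_LAYER_SHIFT;  /* encode layer bits */
        visibility &= ~RAY_CAMERA;                    /* camera test happens via layer flags */
    }

    printf("visibility mask = 0x%x\n", visibility);   /* 0x3fe */
    return 0;
}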
Example 9: create_subd_mesh
static void create_subd_mesh(Scene *scene,
                             Mesh *mesh,
                             BL::Object& b_ob,
                             BL::Mesh& b_mesh,
                             const vector<Shader*>& used_shaders,
                             float dicing_rate,
                             int max_subdivisions)
{
    BL::SubsurfModifier subsurf_mod(b_ob.modifiers[b_ob.modifiers.length()-1]);
    bool subdivide_uvs = subsurf_mod.use_subsurf_uv();

    create_mesh(scene, mesh, b_mesh, used_shaders, true, subdivide_uvs);

    /* export creases */
    size_t num_creases = 0;
    BL::Mesh::edges_iterator e;

    for(b_mesh.edges.begin(e); e != b_mesh.edges.end(); ++e) {
        if(e->crease() != 0.0f) {
            num_creases++;
        }
    }

    mesh->subd_creases.resize(num_creases);

    Mesh::SubdEdgeCrease* crease = mesh->subd_creases.data();
    for(b_mesh.edges.begin(e); e != b_mesh.edges.end(); ++e) {
        if(e->crease() != 0.0f) {
            crease->v[0] = e->vertices()[0];
            crease->v[1] = e->vertices()[1];
            crease->crease = e->crease();

            crease++;
        }
    }

    /* set subd params */
    if(!mesh->subd_params) {
        mesh->subd_params = new SubdParams(mesh);
    }
    SubdParams& sdparams = *mesh->subd_params;

    PointerRNA cobj = RNA_pointer_get(&b_ob.ptr, "cycles");

    sdparams.dicing_rate = max(0.1f, RNA_float_get(&cobj, "dicing_rate") * dicing_rate);
    sdparams.max_level = max_subdivisions;

    scene->camera->update();
    sdparams.camera = scene->camera;
    sdparams.objecttoworld = get_transform(b_ob.matrix_world());
}
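Example 9 exports the creases with a count-then-fill pattern: one pass counts the non-zero creases, the storage is sized once, and a second pass writes through a raw pointer. A self-contained sketch of that pattern, with plain structs standing in for the Blender/Cycles types:

#include <cstdio>
#include <vector>

struct Edge { int v0, v1; float crease; };
struct SubdEdgeCrease { int v[2]; float crease; };

int main()
{
    std::vector<Edge> edges = {{0, 1, 0.0f}, {1, 2, 0.5f}, {2, 3, 1.0f}};

    /* first pass: count entries to export */
    size_t num_creases = 0;
    for(const Edge& e : edges)
        if(e.crease != 0.0f)
            num_creases++;

    /* size once, then fill through a raw pointer in a second pass */
    std::vector<SubdEdgeCrease> creases(num_creases);
    SubdEdgeCrease* crease = creases.data();
    for(const Edge& e : edges) {
        if(e.crease != 0.0f) {
            crease->v[0] = e.v0;
            crease->v[1] = e.v1;
            crease->crease = e.crease;
            crease++;
        }
    }

    printf("exported %zu creases\n", creases.size());
    return 0;
}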
Example 10: blender_camera_focal_distance
static float blender_camera_focal_distance(BL::Object b_ob, BL::Camera b_camera)
{
    BL::Object b_dof_object = b_camera.dof_object();

    if(!b_dof_object)
        return b_camera.dof_distance();

    /* for dof object, return distance along camera Z direction */
    Transform obmat = transform_clear_scale(get_transform(b_ob.matrix_world()));
    Transform dofmat = get_transform(b_dof_object.matrix_world());
    Transform mat = transform_inverse(obmat) * dofmat;

    return fabsf(transform_get_column(&mat, 3).z);
}
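The "distance along camera Z direction" in Example 10 can also be read as projecting the offset to the DOF target onto the camera's view direction; for a rigid (scale-free) camera matrix, as produced by transform_clear_scale above, the two formulations agree. A standalone sketch under that assumption, with a plain float3 standing in for the Cycles type:

#include <cmath>
#include <cstdio>

struct float3 { float x, y, z; };

static float dot(const float3& a, const float3& b)
{
    return a.x * b.x + a.y * b.y + a.z * b.z;
}

int main()
{
    float3 cam_pos = {0.0f, 0.0f, 0.0f};
    float3 cam_dir = {0.0f, 0.0f, -1.0f};   /* camera looks down -Z */
    float3 dof_pos = {1.0f, 2.0f, -7.0f};   /* DOF target position */

    float3 offset = {dof_pos.x - cam_pos.x, dof_pos.y - cam_pos.y, dof_pos.z - cam_pos.z};
    float focal_distance = std::fabs(dot(offset, cam_dir));
    printf("focal distance = %f\n", focal_distance);  /* 7.0 */
    return 0;
}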
Example 11: sync_camera_motion
void BlenderSync::sync_camera_motion(BL::Object b_ob, int motion)
{
    Camera *cam = scene->camera;

    Transform tfm = get_transform(b_ob.matrix_world());
    tfm = blender_camera_matrix(tfm, cam->type);

    if(tfm != cam->matrix) {
        if(motion == -1)
            cam->motion.pre = tfm;
        else
            cam->motion.post = tfm;

        cam->use_motion = true;
    }
}
Example 12: object_boundbox_clip
/* TODO(sergey): Not really optimal, consider approaches based on k-DOP in order
 * to reduce number of objects which are wrongly considered visible.
 */
static bool object_boundbox_clip(Scene *scene,
                                 BL::Object& b_ob,
                                 Transform& tfm,
                                 float margin)
{
    Camera *cam = scene->camera;
    Transform& worldtondc = cam->worldtondc;
    BL::Array<float, 24> boundbox = b_ob.bound_box();
    float3 bb_min = make_float3(FLT_MAX, FLT_MAX, FLT_MAX),
           bb_max = make_float3(-FLT_MAX, -FLT_MAX, -FLT_MAX);
    bool all_behind = true;

    for(int i = 0; i < 8; ++i) {
        float3 p = make_float3(boundbox[3 * i + 0],
                               boundbox[3 * i + 1],
                               boundbox[3 * i + 2]);
        p = transform_point(&tfm, p);

        float4 b = make_float4(p.x, p.y, p.z, 1.0f);
        float4 c = make_float4(dot(worldtondc.x, b),
                               dot(worldtondc.y, b),
                               dot(worldtondc.z, b),
                               dot(worldtondc.w, b));
        p = float4_to_float3(c / c.w);

        if(c.z < 0.0f) {
            p.x = 1.0f - p.x;
            p.y = 1.0f - p.y;
        }

        if(c.z >= -margin) {
            all_behind = false;
        }

        bb_min = min(bb_min, p);
        bb_max = max(bb_max, p);
    }

    if(!all_behind) {
        if(bb_min.x >= 1.0f + margin ||
           bb_min.y >= 1.0f + margin ||
           bb_max.x <= -margin ||
           bb_max.y <= -margin)
        {
            return true;
        }
        return false;
    }

    return true;
}
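Once the NDC bounds are accumulated in Example 12, the clip decision is just an interval test against the screen window extended by the margin. A minimal sketch of that final test with made-up values:

#include <cstdio>

struct float2 { float x, y; };

/* Object is culled when its NDC bounding box lies entirely outside the
 * [0, 1] screen window extended by the margin. */
static bool boundbox_clipped(float2 bb_min, float2 bb_max, float margin)
{
    return bb_min.x >= 1.0f + margin ||
           bb_min.y >= 1.0f + margin ||
           bb_max.x <= -margin ||
           bb_max.y <= -margin;
}

int main()
{
    float margin = 0.05f;
    printf("%d\n", boundbox_clipped({1.2f, 0.3f}, {1.5f, 0.6f}, margin));  /* 1: fully right of window */
    printf("%d\n", boundbox_clipped({0.2f, 0.3f}, {0.8f, 0.6f}, margin));  /* 0: inside window */
    return 0;
}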
Example 13: sync_hair
OCT_NAMESPACE_BEGIN

////////////////////////////////////////////////////////////////////////////////
// Sync hair data
////////////////////////////////////////////////////////////////////////////////
void BlenderSync::sync_hair(Mesh *mesh, BL::Mesh b_mesh, BL::Object b_ob, bool motion, int time_index) {
    if(!motion) {
        mesh->hair_points.clear();
        mesh->vert_per_hair.clear();
        mesh->hair_thickness.clear();
        mesh->hair_mat_indices.clear();
        mesh->hair_uvs.clear();
    }

    if(b_ob.mode() == b_ob.mode_PARTICLE_EDIT) return;

    fill_mesh_hair_data(mesh, &b_mesh, &b_ob);
} //sync_hair()
Example 14: sync_camera_motion
void BlenderSync::sync_camera_motion(BL::Object b_ob, float motion_time)
{
    Camera *cam = scene->camera;
    BL::Array<float, 16> b_ob_matrix;
    b_engine.camera_model_matrix(b_ob, b_ob_matrix);
    Transform tfm = get_transform(b_ob_matrix);
    tfm = blender_camera_matrix(tfm, cam->type);

    if(tfm != cam->matrix) {
        VLOG(1) << "Camera " << b_ob.name() << " motion detected.";
        if(motion_time == -1.0f) {
            cam->motion.pre = tfm;
            cam->use_motion = true;
        }
        else if(motion_time == 1.0f) {
            cam->motion.post = tfm;
            cam->use_motion = true;
        }
    }
}
Example 15: test
bool BlenderObjectCulling::test(Scene *scene, BL::Object& b_ob, Transform& tfm)
{
    if(!use_camera_cull_ && !use_distance_cull_) {
        return false;
    }

    /* Compute world space bounding box corners. */
    float3 bb[8];
    BL::Array<float, 24> boundbox = b_ob.bound_box();
    for(int i = 0; i < 8; ++i) {
        float3 p = make_float3(boundbox[3 * i + 0],
                               boundbox[3 * i + 1],
                               boundbox[3 * i + 2]);
        bb[i] = transform_point(&tfm, p);
    }

    bool camera_culled = use_camera_cull_ && test_camera(scene, bb);
    bool distance_culled = use_distance_cull_ && test_distance(scene, bb);

    return ((camera_culled && distance_culled) ||
            (camera_culled && !use_distance_cull_) ||
            (distance_culled && !use_camera_cull_));
}