This article collects typical usage examples of the C++ method ShadingPoint::get_ray. If you are wondering what ShadingPoint::get_ray does, how to call it, or what real uses of it look like, the hand-picked code examples below may help. You can also explore further usage examples of the enclosing class, ShadingPoint.
Five code examples of ShadingPoint::get_ray are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code examples.
Example 1: apply_aerial_perspective
void apply_aerial_perspective(
    const InputValues& values,
    const ShadingContext& shading_context,
    const PixelContext& pixel_context,
    const ShadingPoint& shading_point,
    ShadingResult& shading_result) const
{
    Spectrum sky_color;

    if (m_aerial_persp_mode == AerialPerspSkyColor)
        sky_color = values.m_aerial_persp_sky_color;
    else
    {
        // Retrieve the environment shader of the scene.
        const Scene& scene = shading_point.get_scene();
        const EnvironmentShader* environment_shader =
            scene.get_environment()->get_environment_shader();

        if (environment_shader)
        {
            // Execute the environment shader to obtain the sky color in the direction of the ray.
            InputEvaluator input_evaluator(shading_context.get_texture_cache());
            const ShadingRay& ray = shading_point.get_ray();
            const Vector3d direction = normalize(ray.m_dir);
            ShadingResult sky;
            environment_shader->evaluate(
                shading_context,
                pixel_context,
                input_evaluator,
                direction,
                sky);
            sky_color = sky.m_main.m_color;
        }
        else sky_color.set(0.0f);
    }

    // Compute the blend factor.
    const double d = shading_point.get_distance() * m_aerial_persp_rcp_distance;
    const double k = m_aerial_persp_intensity * exp(d);
    const double blend = min(k, 1.0);

    // Blend the shading result and the sky color.
    sky_color *= static_cast<float>(blend);
    shading_result.m_main.m_color *= static_cast<float>(1.0 - blend);
    shading_result.m_main.m_color += sky_color;
}
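For reference, the blend in Example 1 can be isolated as a small standalone function. The helper below is only an illustrative sketch (compute_aerial_blend, hit_distance, rcp_distance and intensity are names chosen here, not part of appleseed); it reproduces the exponential falloff used above, where the sky color's contribution saturates at 1.0 once the hit distance grows large relative to the reference distance.

#include <algorithm>
#include <cmath>

// Illustrative sketch only: mirrors the blend factor computed in Example 1.
// 'rcp_distance' stands for the reciprocal of the aerial perspective reference distance.
double compute_aerial_blend(
    const double hit_distance,
    const double rcp_distance,
    const double intensity)
{
    const double d = hit_distance * rcp_distance;   // normalized distance along the ray
    const double k = intensity * std::exp(d);       // grows exponentially with distance
    return std::min(k, 1.0);                        // clamp to full sky contribution
}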
Example 2: intersect
void EmbreeScene::intersect(ShadingPoint& shading_point) const
{
    RTCIntersectContext context;
    rtcInitIntersectContext(&context);

    RTCRayHit rayhit;
    shading_ray_to_embree_ray(shading_point.get_ray(), rayhit.ray);
    rayhit.hit.geomID = RTC_INVALID_GEOMETRY_ID;

    rtcIntersect1(m_scene, &context, &rayhit);

    if (rayhit.hit.geomID != RTC_INVALID_GEOMETRY_ID)
    {
        assert(rayhit.hit.geomID < m_geometry_container.size());
        const auto& geometry_data = m_geometry_container[rayhit.hit.geomID];
        assert(geometry_data);

        shading_point.m_bary[0] = rayhit.hit.u;
        shading_point.m_bary[1] = rayhit.hit.v;
        shading_point.m_object_instance_index = geometry_data->m_object_instance_idx;

        // TODO: remove regions
        shading_point.m_primitive_index = rayhit.hit.primID;
        shading_point.m_primitive_type = ShadingPoint::PrimitiveTriangle;
        shading_point.m_ray.m_tmax = rayhit.ray.tfar;

        const uint32 v0_idx = geometry_data->m_primitives[rayhit.hit.primID * 3];
        const uint32 v1_idx = geometry_data->m_primitives[rayhit.hit.primID * 3 + 1];
        const uint32 v2_idx = geometry_data->m_primitives[rayhit.hit.primID * 3 + 2];

        if (geometry_data->m_motion_steps_count > 1)
        {
            const uint32 last_motion_step_idx = geometry_data->m_motion_steps_count - 1;
            const uint32 motion_step_begin_idx = static_cast<uint32>(rayhit.ray.time * last_motion_step_idx);
            const uint32 motion_step_end_idx = motion_step_begin_idx + 1;
            const uint32 motion_step_begin_offset = motion_step_begin_idx * geometry_data->m_vertices_count;
            const uint32 motion_step_end_offset = motion_step_end_idx * geometry_data->m_vertices_count;
            const float motion_step_begin_time = static_cast<float>(motion_step_begin_idx) / last_motion_step_idx;

            // Linear interpolation coefficients.
            const float p = (rayhit.ray.time - motion_step_begin_time) * last_motion_step_idx;
            const float q = 1.0f - p;
            assert(p > 0.0f && p <= 1.0f);

            const TriangleType triangle(
                Vector3d(
                    geometry_data->m_vertices[motion_step_begin_offset + v0_idx] * q
                    + geometry_data->m_vertices[motion_step_end_offset + v0_idx] * p),
                Vector3d(
                    geometry_data->m_vertices[motion_step_begin_offset + v1_idx] * q
                    + geometry_data->m_vertices[motion_step_end_offset + v1_idx] * p),
                Vector3d(
                    geometry_data->m_vertices[motion_step_begin_offset + v2_idx] * q
                    + geometry_data->m_vertices[motion_step_end_offset + v2_idx] * p));
            shading_point.m_triangle_support_plane.initialize(triangle);
        }
        else
        {
            const TriangleType triangle(
                Vector3d(geometry_data->m_vertices[v0_idx]),
                Vector3d(geometry_data->m_vertices[v1_idx]),
                Vector3d(geometry_data->m_vertices[v2_idx]));
            shading_point.m_triangle_support_plane.initialize(triangle);
        }
    }
}
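Example 2 relies on a helper, shading_ray_to_embree_ray, whose body is not shown. Below is a minimal sketch of what such a conversion might look like against the Embree 3 API; the exact field mapping, in particular the tnear and time values and the m_tmin member name, is an assumption for illustration and not appleseed's actual implementation.

#include <embree3/rtcore.h>

// Sketch of a ShadingRay -> RTCRay conversion (assumed, not appleseed's actual code).
void shading_ray_to_embree_ray_sketch(const ShadingRay& shading_ray, RTCRay& ray)
{
    // Ray origin and start distance.
    ray.org_x = static_cast<float>(shading_ray.m_org[0]);
    ray.org_y = static_cast<float>(shading_ray.m_org[1]);
    ray.org_z = static_cast<float>(shading_ray.m_org[2]);
    ray.tnear = static_cast<float>(shading_ray.m_tmin);    // assumed member name

    // Ray direction and maximum distance.
    ray.dir_x = static_cast<float>(shading_ray.m_dir[0]);
    ray.dir_y = static_cast<float>(shading_ray.m_dir[1]);
    ray.dir_z = static_cast<float>(shading_ray.m_dir[2]);
    ray.tfar = static_cast<float>(shading_ray.m_tmax);

    // Normalized motion blur time in [0, 1]; how ShadingRay stores its time
    // is an assumption here, so a fixed value is used in this sketch.
    ray.time = 0.0f;
    ray.mask = ~0u;
    ray.id = 0;
    ray.flags = 0;
}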
Example 3: evaluate
void DiagnosticSurfaceShader::evaluate(
    SamplingContext& sampling_context,
    const PixelContext& pixel_context,
    const ShadingContext& shading_context,
    const ShadingPoint& shading_point,
    ShadingResult& shading_result) const
{
    switch (m_shading_mode)
    {
      case Color:
        {
            shading_result.set_main_to_opaque_pink_linear_rgba();

            const Material* material = shading_point.get_material();
            if (material)
            {
                const Material::RenderData& material_data = material->get_render_data();

#ifdef APPLESEED_WITH_OSL
                // Execute the OSL shader if there is one.
                if (material_data.m_shader_group)
                {
                    shading_context.execute_osl_shading(
                        *material_data.m_shader_group,
                        shading_point);
                }
#endif

                if (material_data.m_bsdf)
                {
                    InputEvaluator input_evaluator(shading_context.get_texture_cache());
                    material_data.m_bsdf->evaluate_inputs(
                        shading_context,
                        input_evaluator,
                        shading_point);

                    const Vector3d direction = -normalize(shading_point.get_ray().m_dir);
                    material_data.m_bsdf->evaluate(
                        input_evaluator.data(),
                        false,
                        false,
                        shading_point.get_geometric_normal(),
                        shading_point.get_shading_basis(),
                        direction,
                        direction,
                        ScatteringMode::All,
                        shading_result.m_main.m_color);
                    shading_result.m_color_space = ColorSpaceSpectral;
                }
            }
        }
        break;

      case Coverage:
        shading_result.set_main_to_linear_rgb(Color3f(1.0f));
        break;

      case Barycentric:
        shading_result.set_main_to_linear_rgb(
            vector2_to_color(shading_point.get_bary()));
        break;

      case UV:
        shading_result.set_main_to_linear_rgb(
            uvs_to_color(shading_point.get_uv(0)));
        break;

      case Tangent:
      case Bitangent:
      case ShadingNormal:
        {
#ifdef APPLESEED_WITH_OSL
            const Material* material = shading_point.get_material();
            if (material)
            {
                const Material::RenderData& material_data = material->get_render_data();

                // Execute the OSL shader if there is one.
                if (material_data.m_shader_group)
                {
                    sampling_context.split_in_place(2, 1);
                    shading_context.execute_osl_bump(
                        *material_data.m_shader_group,
                        shading_point,
                        sampling_context.next_vector2<2>());
                }
            }
#endif

            const Vector3d v =
                m_shading_mode == ShadingNormal ? shading_point.get_shading_basis().get_normal() :
                m_shading_mode == Tangent ? shading_point.get_shading_basis().get_tangent_u() :
                shading_point.get_shading_basis().get_tangent_v();
            shading_result.set_main_to_linear_rgb(vector3_to_color(v));
        }
        break;

      case GeometricNormal:
//......... the rest of this code is omitted .........
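Example 3 maps geometric quantities (normals, tangents, barycentric coordinates, UVs) to colors through helpers such as vector3_to_color. The conventional way to display a unit vector as a color is to remap its components from [-1, 1] to [0, 1]; the sketch below only illustrates that idea and is not necessarily identical to appleseed's own helper.

// Illustrative sketch: remap a unit vector from [-1, 1] to a displayable [0, 1] color.
Color3f vector3_to_color_sketch(const Vector3d& v)
{
    return Color3f(
        static_cast<float>(0.5 * (v[0] + 1.0)),
        static_cast<float>(0.5 * (v[1] + 1.0)),
        static_cast<float>(0.5 * (v[2] + 1.0)));
}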
Example 4: add_back_lighting
void add_back_lighting(
    const InputValues& values,
    SamplingContext& sampling_context,
    const PixelContext& pixel_context,
    const ShadingContext& shading_context,
    const ShadingPoint& shading_point,
    Spectrum& radiance,
    SpectrumStack& aovs) const
{
    const Vector3d& p = shading_point.get_point();
    const Vector3d& n = shading_point.get_original_shading_normal();
    const Vector3d& d = shading_point.get_ray().m_dir;

    // Construct a ray perpendicular to the other side of the surface.
    ShadingRay back_ray(shading_point.get_ray());
    back_ray.m_tmax *= norm(d);
    back_ray.m_dir = dot(d, n) > 0.0 ? -n : n;
    back_ray.m_org = p - back_ray.m_tmax * back_ray.m_dir;

    ShadingPoint back_shading_point(shading_point);
    back_shading_point.set_ray(back_ray);

    Spectrum back_radiance(0.0f);
    SpectrumStack back_aovs(aovs.size(), 0.0f);

    /*
#ifdef WITH_OSL
    // Execute the OSL shader, if we have one.
    const Material* material = back_shading_point.get_material();
    if (material && material->get_osl_surface_shader())
    {
        shading_context.execute_osl_shadergroup(
            *material->get_osl_surface_shader(),
            back_shading_point);
    }
#endif
    */

    // Compute back lighting.
    for (size_t i = 0; i < m_back_lighting_samples; ++i)
    {
        shading_context.get_lighting_engine()->compute_lighting(
            sampling_context,
            pixel_context,
            shading_context,
            back_shading_point,
            back_radiance,
            back_aovs);
    }

    // Apply translucency factor.
    back_radiance *= values.m_translucency;
    back_aovs *= values.m_translucency;

    // Divide by the number of samples.
    const float rcp_sample_count = 1.0f / static_cast<float>(m_back_lighting_samples);
    back_radiance *= rcp_sample_count;
    back_aovs *= rcp_sample_count;

    // Add back lighting contribution.
    radiance += back_radiance;
    aovs += back_aovs;
}
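The key step in Example 4 is building a secondary ray that starts on the opposite side of the surface and points back toward the shading point. The sketch below isolates that construction (build_back_ray is an illustrative name, not an appleseed function); m_tmax is scaled by the length of the original ray direction, presumably because that direction is not required to be normalized, so the scaled value expresses the ray length in world units.

// Illustrative sketch of the back-ray construction used in Example 4.
ShadingRay build_back_ray(const ShadingPoint& shading_point)
{
    const Vector3d& p = shading_point.get_point();
    const Vector3d& n = shading_point.get_original_shading_normal();
    const Vector3d& d = shading_point.get_ray().m_dir;

    ShadingRay back_ray(shading_point.get_ray());
    back_ray.m_tmax *= norm(d);                              // express tmax as a world-space length
    back_ray.m_dir = dot(d, n) > 0.0 ? -n : n;               // unit normal pointing to the far side
    back_ray.m_org = p - back_ray.m_tmax * back_ray.m_dir;   // start on the far side, aiming at p
    return back_ray;
}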
Example 5: sample
size_t SubsurfaceSampler::sample(
    SamplingContext& sampling_context,
    const ShadingPoint& outgoing_point,
    const BSSRDF& bssrdf,
    const void* bssrdf_data,
    SubsurfaceSample samples[],
    const size_t max_sample_count)
{
    assert(max_sample_count > 0);

    // Sample the diffusion profile.
    BSSRDFSample bssrdf_sample(sampling_context);
    if (!bssrdf.sample(bssrdf_data, bssrdf_sample))
        return 0;

    // Reject points too far away.
    // This introduces negligible bias in comparison to the other approximations.
    const Vector2d& point(bssrdf_sample.get_point());
    const double radius2 = square_norm(point);
    const double rmax2 = bssrdf_sample.get_rmax2();
    if (radius2 > rmax2)
        return 0;

    // Evaluate the PDF of the diffusion profile.
    const double radius = sqrt(radius2);
    const double bssrdf_sample_pdf =
        bssrdf.evaluate_pdf(bssrdf_data, bssrdf_sample.get_channel(), radius);

    // Pick a sampling basis.
    sampling_context.split_in_place(1, 1);
    Axis sampling_axis;
    Basis3d sampling_basis;
    double sampling_basis_pdf;
    pick_sampling_basis(
        outgoing_point.get_shading_basis(),
        sampling_context.next_double2(),
        sampling_axis,
        sampling_basis,
        sampling_basis_pdf);

    // Compute height of sample point on (positive) hemisphere of radius Rmax.
    assert(rmax2 >= radius2);
    const double h = sqrt(rmax2 - radius2);

    // Compute sphere entry and exit points.
    Vector3d entry_point, exit_point;
    entry_point = exit_point = outgoing_point.get_point();
    entry_point += sampling_basis.transform_to_parent(Vector3d(point[0], +h, point[1]));
    exit_point += sampling_basis.transform_to_parent(Vector3d(point[0], -h, point[1]));
    assert(feq(norm(exit_point - entry_point), 2.0 * h, 1.0e-9));

    // Build a probe ray inscribed inside the sphere of radius Rmax.
    ShadingRay probe_ray(
        entry_point,
        -sampling_basis.get_normal(),
        0.0,
        2.0 * h,
        outgoing_point.get_time(),
        VisibilityFlags::ProbeRay,
        outgoing_point.get_ray().m_depth + 1);

    const Material* material = outgoing_point.get_material();
    ShadingPoint shading_points[2];
    size_t shading_point_index = 0;
    ShadingPoint* parent_shading_point = 0;
    size_t sample_count = 0;

    // Trace the ray and return all intersections (or up to max_sample_count of them) found inside the sphere.
    while (true)
    {
        // Continue tracing the ray.
        shading_points[shading_point_index].clear();
        if (!m_shading_context.get_intersector().trace(
                probe_ray,
                shading_points[shading_point_index],
                parent_shading_point))
            break;

        // Only consider points lying on surfaces with the same material as the outgoing point.
        if (shading_points[shading_point_index].get_material() == material)
        {
            // Execute the OSL shader if we have one. Needed for bump mapping.
#ifdef APPLESEED_WITH_OSL
            if (material->has_osl_surface())
            {
                sampling_context.split_in_place(1, 1);
                m_shading_context.execute_osl_bump(
                    *material->get_osl_surface(),
                    shading_points[shading_point_index],
                    sampling_context.next_double2());
            }
#endif

            SubsurfaceSample& sample = samples[sample_count++];
            sample.m_point = shading_points[shading_point_index];

            // Compute sample probability.
            sample.m_probability =
                bssrdf_sample_pdf
                * sampling_basis_pdf
//......... the rest of this code is omitted .........
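The geometry behind the probe ray in Example 5 is a chord of the sphere of radius Rmax centered on the outgoing point: a disk sample at distance r from the center is lifted to height h = sqrt(Rmax^2 - r^2) on either side of the sampling plane, so the probe ray spans a length of 2h. The sketch below simply restates that relationship (probe_chord_half_length is an illustrative name, not an appleseed function).

#include <cassert>
#include <cmath>

// Illustrative sketch: half-length of the probe ray for a disk sample at squared radius r2,
// inscribed in the sphere of squared radius rmax2 (see Example 5).
double probe_chord_half_length(const double r2, const double rmax2)
{
    assert(rmax2 >= r2);
    return std::sqrt(rmax2 - r2);   // the probe ray then runs from +h to -h, i.e. over 2h
}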