<!doctype html>
<html lang="en">
<head>
<!-- all praise to https://realfavicongenerator.net -->
<link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png">
<link rel="icon" type="image/png" sizes="32x32" href="/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="16x16" href="/favicon-16x16.png">
<link rel="manifest" href="/site.webmanifest">
<link rel="mask-icon" href="/safari-pinned-tab.svg" color="#5bbad5">
<meta name="msapplication-TileColor" content="#da532c">
<meta name="theme-color" content="#ffffff">
<!-- Required meta tags -->
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<link rel="stylesheet" href="/fonts.css">
<link rel="stylesheet" href="/fontawesome-free-5.15.3-web/css/all.css">
<link rel="stylesheet" href="/bootstrap.min.css">
<script async src="https://cse.google.com/cse.js?cx=003601324460585362024:4xwpwgaitgd"></script>
<script src="/react.min.js"></script>
<script src="/react-dom.min.js"></script>
<script src="/jeri.min.js"></script>
<link rel="preload" href="/exr.worker.js" as="script" crossorigin="anonymous">
<link rel="stylesheet" href="/3ed-2018/pbrstyle.css">
<script src="/3ed-2018/pbrt-display.js"></script>
<title>Preface</title>
</head>
<body>
<nav class="fixed-top-lg-navbar navbar navbar-expand">
<ul class="nav navbar-nav">
<a class="navbar-brand" href="/3ed-2018/contents.html"><img src="/3ed-2018/pbr.jpg" width=25 height=25></a>
<li class="nav-item"><a class="nav-link" href="contents.html">Physically Based Rendering: </a></li>
<li class="nav-item"><a class="nav-link" href="#">Preface</a></li>
</ul>
<ul class="nav navbar-nav ml-auto d-none d-md-block">
<li class="nav-item"><div class="gcse-search"></div></li>
</ul>
<ul class="nav navbar-nav d-block d-md-none">
<li class="nav-item"><div class="gcse-search"></div></li>
</ul>
<ul class="nav navbar-nav d-block">
<li class="nav-item"><button class="displaymode" onclick="TogglePBRTDisplayMode()"></button></li>
</ul>
</nav>
<div class="maincontainer">
<div class="container-fluid">
<div class="row">
<div class="col-md-1 col-lg-2 leftcolumn">
</div>
<div class="col-md-10 col-lg-8">
</div> <!-- col-md-10 col-lg-8 -->
<div class="col-md-1 col-lg-2">
</div> <!-- col-md-1 col-lg-2 -->
</div> <!-- row -->
<div class="row">
<div class="col-md-1 col-lg-2 leftcolumn">
<a href="#"><i class="fas fa-link "></i></a>
</div>
<div class="col-md-10 col-lg-8">
<h1> Preface</h1><p>
</p>
<p> <em>[Just as] other information
should be available to those who want to
learn and understand, program source code is the only means for programmers
to learn the art from their predecessors. It would be unthinkable for
playwrights not to allow other playwrights to read their plays [or to
allow them] at theater performances where they would be barred even from
taking notes. Likewise, any good author is well read, as every child who
learns to write will read hundreds of times more than it
writes. Programmers, however, are expected to invent the alphabet and learn
to write long novels all on their own. Programming cannot grow and learn
unless the next generation of programmers has access to the knowledge and
information gathered by other programmers before them.</em>
—Erik Naggum
</p>
<p>
</p>
<p> Rendering is a fundamental component of computer graphics. At
the highest
level of abstraction, rendering is the process of converting a
description of a three-dimensional scene into an image. Algorithms for
animation, geometric modeling, texturing, and other areas of computer
graphics all must pass their results through some sort of rendering process
so that they can be made visible in an image. Rendering
has become ubiquitous; from movies to games and beyond, it has opened new
frontiers for creative expression, entertainment, and visualization.
</p>
<p>In the early years of the field, research in rendering focused on solving
fundamental problems such as determining which objects are visible from a
given viewpoint. As effective solutions to these problems have been found and
as richer and more realistic scene descriptions have
become available thanks to
continued progress in other areas of graphics, modern rendering has grown
to include ideas from a broad range of disciplines, including physics
and astrophysics, astronomy, biology, psychology and the study of
perception, and pure and applied mathematics. The interdisciplinary nature
of rendering is one of the reasons that it is such a fascinating area of
study.
</p>
<p>This book presents a selection of modern rendering algorithms through the
documented source code for a complete rendering system. Nearly all of the images
in this book, including the one on the front cover, were
rendered by this software. All of the algorithms that came together to
generate these images are described in these pages. The system,
<tt>pbrt</tt>, is written using a programming methodology called <em>literate
programming</em> that mixes prose describing the system with the source code
that implements it. We believe that the literate programming approach is a
valuable way to introduce ideas in computer graphics and computer science
in general. Often, some of the subtleties of an algorithm can be unclear
or hidden until it is implemented, so seeing an actual implementation is a
good way to acquire a solid understanding of that algorithm’s details.
Indeed, we believe that deep understanding of a small number of algorithms
in this manner provides a stronger base for further study of computer
graphics than does superficial understanding of many.
</p>
<p>In addition to clarifying how an algorithm is implemented in practice,
presenting these algorithms in the context of a complete and nontrivial
software system also allows us to address issues in the design and
implementation of medium-sized rendering systems. The design of a
rendering system’s basic abstractions and interfaces has substantial
implications for both the elegance of the implementation and the ability to
extend it later, yet the trade-offs in this design space are rarely
discussed.
</p>
<p><tt>pbrt</tt> and the contents of this book focus exclusively on <em>photorealistic
rendering</em>, which can be defined variously as the task of generating images
that are indistinguishable from those that a camera would capture in a
photograph or as the task of generating images that evoke
the same response from a human observer as
looking at the actual scene. There are many reasons to focus on
photorealism. Photorealistic images are crucial for
the movie special-effects industry because computer-generated imagery
must often be mixed seamlessly with footage of the real world.
In entertainment applications where all of the imagery is synthetic,
photorealism is an effective tool for making the observer forget that he or
she is looking at an environment that does not actually exist. Finally,
photorealism gives a reasonably well-defined metric for evaluating the
quality of the rendering system’s output.
</p>
<p>
</p>
<p>
</p>
<p>
</p>
</div> <!-- col-md-10 col-lg-8 -->
<div class="col-md-1 col-lg-2">
</div> <!-- col-md-1 col-lg-2 -->
</div> <!-- row -->
<div class="row">
<div class="col-md-1 col-lg-2 leftcolumn">
</div>
<div class="col-md-10 col-lg-8">
<h3>Audience</h3><p>
</p>
<p> There are three main audiences that this book is intended for.
The first is students in graduate or upper-level undergraduate computer
graphics classes. This book assumes existing knowledge of computer
graphics at the level of an introductory college-level course, although
certain key concepts such as basic vector geometry and transformations will
be reviewed here. For students who do not have experience with programs
that have tens of thousands of lines of source code, the literate
programming style gives a gentle introduction to this complexity. We pay
special attention to explaining the reasoning behind some of the key
interfaces and abstractions in the system in order to give these readers a
sense of why the system is structured in the way that it is.
</p>
<p>The second audience is advanced graduate students and researchers in
computer graphics. For those doing research in rendering, the book provides
a broad introduction to the area, and the <tt>pbrt</tt> source code provides a
foundation that can be useful to build upon (or at least to use bits of
source code from). For those working in other areas, we believe that
having a thorough understanding of rendering can be helpful context to
carry along.
</p>
<p>Our final audience is software developers in industry. Although many of
the ideas in this book will likely be familiar to this audience, seeing
explanations of the algorithms presented in the literate style may provide
new perspectives. <tt>pbrt</tt> includes implementations of a number of advanced
and/or difficult-to-implement algorithms and techniques, such as
subdivision surfaces, Monte Carlo sampling algorithms, bidirectional path
tracing, Metropolis sampling, and subsurface scattering; these should be of
particular interest to experienced practitioners in rendering. We hope
that delving into one particular organization of a complete and nontrivial
rendering system will also be thought provoking to this audience.
</p>
<p>
</p>
<p>
</p>
</div> <!-- col-md-10 col-lg-8 -->
<div class="col-md-1 col-lg-2">
</div> <!-- col-md-1 col-lg-2 -->
</div> <!-- row -->
<div class="row">
<div class="col-md-1 col-lg-2 leftcolumn">
</div>
<div class="col-md-10 col-lg-8">
<h3>Overview and Goals</h3><p>
</p>
<p><tt>pbrt</tt> is based on the <em>ray-tracing</em> algorithm. Ray tracing is an
elegant technique that has its origins in lens making; Carl Friedrich
Gauß traced rays through lenses by hand in the 19th century.
Ray-tracing algorithms on computers follow the path of infinitesimal rays
of light through the scene until they intersect a surface. This approach
gives a simple method for finding the first visible object as seen from any
particular position and direction and is the basis for many rendering
algorithms.
</p>
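<p>To make this concrete, the following is a minimal sketch in C++ of the first-hit query at the heart of ray tracing: a ray is tested against every shape in the scene and the closest intersection is kept. The <tt>Ray</tt>, <tt>Shape</tt>, and <tt>FirstHit</tt> names here are hypothetical stand-ins for illustration only; they are not <tt>pbrt</tt>'s actual classes, which are developed over the course of the book.
</p>
<pre>
// A minimal sketch of ray casting: test a ray against every shape in the
// scene and keep the closest intersection. Hypothetical types, not pbrt's.
#include &lt;limits&gt;
#include &lt;memory&gt;
#include &lt;vector&gt;

struct Point3  { float x, y, z; };
struct Vector3 { float x, y, z; };
struct Ray { Point3 o; Vector3 d; };   // origin and direction

class Shape {
  public:
    virtual ~Shape() = default;
    // If the ray hits this shape, return true and store the parametric
    // distance to the nearest hit in *tHit.
    virtual bool Intersect(const Ray &amp;ray, float *tHit) const = 0;
};

// Find the first visible surface along a ray by brute force; production
// renderers accelerate this query with spatial data structures.
bool FirstHit(const Ray &amp;ray,
              const std::vector&lt;std::shared_ptr&lt;Shape&gt;&gt; &amp;scene,
              float *tClosest) {
    *tClosest = std::numeric_limits&lt;float&gt;::infinity();
    bool hitSomething = false;
    for (const auto &amp;shape : scene) {
        float t;
        if (shape-&gt;Intersect(ray, &amp;t) &amp;&amp; t &lt; *tClosest) {
            *tClosest = t;
            hitSomething = true;
        }
    }
    return hitSomething;
}
</pre>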
<p><tt>pbrt</tt> was designed and implemented with three main goals in mind: it should
be <em>complete</em>, it should be <em>illustrative</em>, and it should be
<em>physically based</em>.
</p>
<p>Completeness implies that the system should not lack key features found in
high-quality commercial rendering systems. In particular, it means that
important practical issues, such as antialiasing, robustness, numerical
precision, and the
ability to efficiently render complex scenes, should all be addressed
thoroughly. It is important to consider these issues from the start of the
system’s design, since these features can have subtle implications for all
components of the system and can be quite difficult to retrofit into the
system at a later stage of implementation.
</p>
<p>Our second goal means that we tried to choose algorithms, data structures,
and rendering techniques with care and with an eye toward readability and
clarity. Since their implementations will be examined by more readers
than is the case for many other rendering systems, we tried to select the
most elegant algorithms that we were aware of and implement them as well as
possible. This goal also required that the system be small enough for a
single person to understand completely. We have implemented <tt>pbrt</tt> using an
extensible architecture, with the core of the system implemented in
terms of a set of carefully designed abstract base classes, and as much of
the specific functionality as possible in implementations of these base classes.
The result is that one
doesn’t need to understand all of the specific implementations in order to
understand the basic structure of the system. This makes it easier to
delve deeply into parts of interest and skip others, without losing sight
of how the overall system fits together.
</p>
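<p>As a rough illustration of this architectural approach (reusing the hypothetical <tt>Shape</tt> interface from the sketch above rather than <tt>pbrt</tt>'s actual classes), a new kind of geometry can be added simply by deriving from the abstract base class; core code such as <tt>FirstHit()</tt> that is written against the interface continues to work unchanged, and a reader can skip the derivation details without losing the overall structure.
</p>
<pre>
// A hypothetical new shape added without modifying the core of the system:
// it only needs to implement the abstract Shape interface sketched earlier.
#include &lt;cmath&gt;

class Sphere : public Shape {
  public:
    Sphere(const Point3 &amp;center, float radius) : c(center), r(radius) {}

    bool Intersect(const Ray &amp;ray, float *tHit) const override {
        // Solve the quadratic |o + t*d - c|^2 = r^2 for the ray parameter t.
        Vector3 oc{ray.o.x - c.x, ray.o.y - c.y, ray.o.z - c.z};
        float a = ray.d.x * ray.d.x + ray.d.y * ray.d.y + ray.d.z * ray.d.z;
        float b = 2 * (oc.x * ray.d.x + oc.y * ray.d.y + oc.z * ray.d.z);
        float k = oc.x * oc.x + oc.y * oc.y + oc.z * oc.z - r * r;
        float disc = b * b - 4 * a * k;
        if (disc &lt; 0) return false;          // ray misses the sphere
        float t = (-b - std::sqrt(disc)) / (2 * a);
        if (t &lt; 0) t = (-b + std::sqrt(disc)) / (2 * a);
        if (t &lt; 0) return false;             // sphere is behind the ray
        *tHit = t;
        return true;
    }

  private:
    Point3 c;
    float r;
};
</pre>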
<p>There is a tension between the two goals of being complete and being
illustrative. Implementing and describing every possible useful technique
would not only make this book unacceptably long, but would also make the
system prohibitively complex for most readers. In cases where <tt>pbrt</tt> lacks
a particularly useful feature, we have attempted to design the architecture
so that the feature could be added without altering the overall system
design.
</p>
<p>The basic foundations for physically based rendering are the laws of
physics and their mathematical expression. <tt>pbrt</tt> was designed to use the
correct physical units and concepts for the quantities it computes and the
algorithms it implements. When configured to do so, <tt>pbrt</tt> can compute
images that are <em>physically correct</em>; they accurately reflect the
lighting as it would be in a real-world version of the scene. One
advantage of the decision to use a physical basis is that it gives a
concrete standard of program correctness: for simple scenes, where the
expected result can be computed in closed form, if <tt>pbrt</tt> doesn’t compute
the same result, we know there must be a bug in the implementation.
Similarly, if different physically based lighting algorithms in <tt>pbrt</tt> give
different results for the same scene, or if
<tt>pbrt</tt> doesn’t give the same results as another physically based renderer,
there is certainly an error in one of them. Finally, we believe that this
physically based approach to rendering is valuable because it is rigorous.
When it is not clear how a particular computation should be performed,
physics gives an answer that guarantees a consistent result.
</p>
<p>Efficiency was given lower priority than these three goals. Since
rendering systems often run for many minutes or hours in the course of
generating an image, efficiency is clearly important. However, we have
mostly confined ourselves to <em>algorithmic</em> efficiency rather than
low-level code optimization. In some cases, obvious micro-optimizations
take a backseat to clear, well-organized code, although we did make some
effort to optimize the parts of the system where most of the computation
occurs.
</p>
<p>In the course of presenting <tt>pbrt</tt> and discussing its implementation, we
hope to convey some hard-learned lessons from years of rendering research
and development. There is more to writing a good renderer than stringing
together a set of fast algorithms; making the system both flexible and
robust is a difficult task. The system’s performance must degrade
gracefully as more geometry or light sources are added to it or as any
other axis of complexity is pushed. Numerical stability must be handled
carefully, and algorithms that don’t waste floating-point precision are
critical.
</p>
<p>The rewards for developing a system that addresses all these issues are
enormous—it is a great pleasure to write a new renderer or add a new
feature to an existing renderer and use it to create an image that couldn’t
be generated before. Our most fundamental goal in writing this book was to
bring this opportunity to a wider audience. Readers are encouraged to use
the system to render the example scenes in the <tt>pbrt</tt> software distribution
as they progress through the book. Exercises at the end of each chapter suggest
modifications to the system that will help clarify its inner workings and
more complex projects to extend the system by adding new features.
</p>
<p>The Web site for this book is located at <a href="https://pbrt.org">pbrt.org</a>. The latest
version of the <tt>pbrt</tt> source code is available from this site, and we
will also post errata and bug fixes, additional
scenes to render, and supplemental utilities. Any
bugs in <tt>pbrt</tt> or errors in this text that are not listed at the Web site
can be reported to the email address <em>[email protected]</em>. We greatly
value your feedback!
</p>
<p>
</p>
<p>
</p>
</div> <!-- col-md-10 col-lg-8 -->
<div class="col-md-1 col-lg-2">
</div> <!-- col-md-1 col-lg-2 -->
</div> <!-- row -->
<div class="row">
<div class="col-md-1 col-lg-2 leftcolumn">
</div>
<div class="col-md-10 col-lg-8">
<h3>Changes Between The First and Second Editions</h3><p>
</p>
<p>Six years passed between the publication of the first edition of this book
in 2004 and the second edition in 2010.
In that time, thousands of copies of the book were sold, and the <tt>pbrt</tt> software was downloaded thousands of times from the book’s Web site.
The <tt>pbrt</tt> user base gave us a significant amount of feedback and
encouragement, and
our experience with the system guided many of the decisions we made in making
changes between the version of <tt>pbrt</tt> presented in the first edition and the
version in the second edition. In addition to a number of bug fixes, we also
made several significant design changes and enhancements:
</p>
<p></p>
<ul>
<li> Removal of the plugin architecture. The first version of <tt>pbrt</tt> used a
run-time plugin architecture to dynamically load code for implementations
of objects like shapes, lights, integrators, cameras, and other objects
that were used in the scene currently being rendered. This approach
allowed users to extend <tt>pbrt</tt> with new object types (e.g., new shape
primitives) without recompiling the entire rendering system. This approach
initially seemed elegant, but it complicated the task of
supporting <tt>pbrt</tt> on multiple platforms and it made debugging more difficult.
The only new usage scenario that it truly enabled (binary-only
distributions of <tt>pbrt</tt> or binary plugins) was actually contrary to our
pedagogical and open-source goals. Therefore, the plugin architecture was
dropped in this edition.
<li> Removal of the image-processing pipeline. The first version of <tt>pbrt</tt> provided a tone-mapping interface that converted high-dynamic-range (HDR)
floating-point output images directly into low-dynamic-range TIFFs for
display. This functionality made sense in 2004, as support for HDR images
was still sparse. In 2010, however, advances in digital photography had
made HDR images commonplace. Although the theory and practice of tone mapping are
elegant and worth learning, we decided to focus the new book exclusively on
the process of image formation and skip the topic of image display.
Interested readers should read the book written by Reinhard et al. (<a href="Preface/Further_Reading.html#cite:Reinhard10">2010</a>) for a
thorough and modern treatment of the HDR image display process.
<li> Task parallelism. Multicore architectures became ubiquitous, and we
felt that <tt>pbrt</tt> would not remain relevant without the ability to scale to
the number of locally available cores. We also hoped that the parallel
programming implementation details documented in this book would help
graphics programmers understand some of the subtleties and complexities in
writing scalable parallel code (e.g., choosing appropriate task
granularities), which is still a difficult and
too infrequently taught topic.
<li> Appropriateness for “production” rendering. The first version of
<tt>pbrt</tt> was intended exclusively as a pedagogical tool and a stepping-stone
for rendering research. Indeed, we made a number of decisions in preparing
the first edition that were contrary to use in a production environment,
such as limited support for image-based lighting, no support for motion
blur, and a photon mapping implementation that wasn’t robust in the
presence of complex lighting. With much improved support for these
features as well as support for subsurface scattering and Metropolis light
transport, we feel that with the second edition, <tt>pbrt</tt> became much more
suitable for rendering very high-quality images of complex environments.
</ul><p>
</p>
<p>
</p>
<p>
</p>
</div> <!-- col-md-10 col-lg-8 -->
<div class="col-md-1 col-lg-2">
</div> <!-- col-md-1 col-lg-2 -->
</div> <!-- row -->
<div class="row">
<div class="col-md-1 col-lg-2 leftcolumn">
</div>
<div class="col-md-10 col-lg-8">
<h3>Changes Between The Second and Third Editions</h3><p>
</p>
<p>With the passage of another six years, it was time to update and extend the
book and the <tt>pbrt</tt> system. We continued to learn from readers’ and users’
experiences to better understand which topics were most useful to cover.
Further, rendering research continued apace; many parts of the book were
due for an update to reflect current best practices. We made significant
improvements on a number of fronts:
</p>
<p></p>
<ul>
<li> Bidirectional light transport. The third version of <tt>pbrt</tt> now
includes a full-featured bidirectional path tracer, including full support for
volumetric light transport and multiple importance
sampling to weight paths. An all-new Metropolis light transport
integrator uses components of the bidirectional path tracer, allowing
for a particularly succinct implementation of that algorithm. The foundations of these
algorithms were established approximately fifteen years ago; it’s overdue
to have solid support for them in <tt>pbrt</tt>.
<li> Subsurface scattering. The appearance of many objects—notably, skin
and translucent objects—is a result of subsurface light transport. Our
implementation of subsurface scattering in the second edition reflected the
state of the art in the early 2000s; we have thoroughly updated both our
BSSRDF models and our subsurface light transport algorithms to reflect the
progress made in ten subsequent years of research. We now use a considerably
more accurate diffusion solution together with a ray-tracing-based sampling
technique, removing the need for the costly preprocessing step used in the
second edition.
<li> Numerically robust intersections. The effects of floating-point
round-off error in
geometric ray intersection calculations have been a long-standing challenge
in ray tracing: they can cause small errors to be present throughout the
image. We have focused on this issue and derived conservative (but tight)
bounds on this error, which makes our implementation more robust to this
issue than previous rendering systems.
<li> Participating media representation. We have significantly improved
the way that scattering media are described and represented in the
system; this allows for more accurate results with nested scattering media.
A new sampling technique enables unbiased rendering of heterogeneous media
in a way that cleanly integrates with all of the other parts of the system.
<li> Measured materials. This edition includes a new technique to represent
and evaluate measured materials using a sparse frequency-space basis. This
approach is convenient because it allows for exact importance sampling, which
was not possible with the representation used in the previous edition.
<li> Photon mapping. A significant step forward for photon mapping
algorithms has been the development of variants that don’t require storing
all of the photons in memory. We have replaced <tt>pbrt</tt>’s photon mapping
algorithm with an implementation based on stochastic progressive photon
mapping, which efficiently renders many difficult light transport effects.
<li> Sample generation algorithms. The distribution of sample values used
for numerical integration in rendering algorithms can have a surprisingly
large effect on the quality of the final results. We have thoroughly
updated our treatment of this topic, covering new approaches and efficient
implementation techniques in more depth than before.
</ul><p>
</p>
<p>Many other parts of the system have been improved and updated to reflect
progress in the field: microfacet reflection models are treated in more
depth, with much better sampling techniques; a new “curve” shape has been
added for modeling hair and other fine geometry; and a new camera model
that simulates realistic lens systems is now available. Throughout the
book, we have made numerous smaller changes to more clearly explain and
illustrate the key concepts in physically based rendering systems like <tt>pbrt</tt>.
</p>
<p>
</p>
<p>
</p>
</div> <!-- col-md-10 col-lg-8 -->
<div class="col-md-1 col-lg-2">
</div> <!-- col-md-1 col-lg-2 -->
</div> <!-- row -->
<div class="row">
<div class="col-md-1 col-lg-2 leftcolumn">
</div>
<div class="col-md-10 col-lg-8">
<h3>Acknowledgments</h3><p>
</p>
<p> Pat Hanrahan has contributed to this book in more ways than we
could hope to acknowledge; we owe a profound debt to him. He tirelessly
argued for clean interfaces and finding the right abstractions to use
throughout the system, and his understanding of and approach to rendering
deeply influenced its design. His willingness to use <tt>pbrt</tt> and this
manuscript in his rendering course at Stanford was enormously helpful,
particularly in the early years of its life when it was still in very rough
form; his feedback throughout this process has been crucial for bringing
the text to its current state. Finally, the group of people that Pat
helped assemble at the Stanford Graphics Lab, and the open environment that
he fostered, made for an exciting, stimulating, and fertile environment.
Matt and Greg both feel extremely privileged to have been there.
</p>
<p>We owe a debt of gratitude to the many students who used early drafts of
this book in courses at Stanford and the University of Virginia between
1999 and 2004. These students provided an enormous amount of feedback
about the book and <tt>pbrt</tt>. The teaching assistants for these courses
deserve special mention: Tim Purcell, Mike Cammarano, Ian Buck, and Ren Ng
at Stanford, and Nolan Goodnight at Virginia. A number of students in
those classes gave particularly valuable feedback and sent bug reports and
bug fixes; we would especially like to thank Evan Parker and Phil Beatty.
A draft of the manuscript of this book was used in classes taught by Bill
Mark and Don Fussell at the University of Texas, Austin, and Raghu
Machiraju at Ohio State University; their feedback was invaluable, and we
are grateful for their adventurousness in incorporating this system into
their courses, even while it was still being edited and revised.
</p>
<p>Matt Pharr would like to acknowledge colleagues and co-workers in
rendering-related endeavors who have been a great source of education and
who have substantially influenced his approach to writing renderers and his
understanding of the field. Particular thanks go to Craig Kolb, who
provided a cornerstone of Matt’s early computer graphics education through
the freely available source code to the <tt>rayshade</tt> ray-tracing system,
and Eric Veach, who has also been generous with his time and expertise.
Thanks also to Doug Shult and Stan Eisenstat for formative lessons in
mathematics and computer science during high school and college,
respectively, and most important to Matt’s parents, for the education
they’ve provided and continued encouragement along the way. Finally, thanks
also to Nick Triantos, Jayant Kolhe, and NVIDIA for their understanding and
support through the final stages of the preparation of the first edition of
the book.
</p>
<p>Greg Humphreys is very grateful to all the professors and TAs who tolerated
him when he was an undergraduate at Princeton. Many people encouraged his
interest in graphics, specifically Michael Cohen, David Dobkin, Adam
Finkelstein, Michael Cox, Gordon Stoll, Patrick Min, and Dan Wallach. Doug
Clark, Steve Lyon, and Andy Wolfe also supervised various independent
research boondoggles without even laughing once. Once, in a group meeting
about a year-long robotics project, Steve Lyon became exasperated and
yelled, “Stop telling me why it can’t be done, and figure out how to do
it!”—an impromptu lesson that will never be forgotten. Eric Ristad
fired Greg as a summer research assistant after his freshman year (before
the summer even began), pawning him off on an unsuspecting Pat Hanrahan and
beginning an advising relationship that would span 10 years and both
coasts. Finally, Dave Hanson taught Greg that literate programming was a
great way to work and that computer programming can be a beautiful and
subtle art form.
</p>
<p>Wenzel Jakob was excited when the first edition of <tt>pbrt</tt> arrived in his mail
during his undergraduate studies in 2004. Needless to say, this had a lasting
effect on his career—thus Wenzel would like to begin by thanking his
co-authors for inviting him to become a part of the third edition of this book.
Wenzel is extremely indebted to Steve Marschner, who was his PhD advisor during
a fulfilling five years at Cornell University. Steve brought him into the world
of research and remains a continuous source of inspiration. Wenzel is also
thankful for the guidance and stimulating research environment created by the
other members of the graphics group, including Kavita Bala, Doug James, and
Bruce Walter. Wenzel spent a wonderful postdoc with Olga Sorkine Hornung who
introduced him to geometry processing. Olga’s support for Wenzel’s involvement
in this book is deeply appreciated.
</p>
<p>For the first edition, we are also grateful to Don Mitchell, for his help
with understanding some of the details of sampling and reconstruction;
Thomas Kollig and Alexander Keller, for explaining the finer points of
low-discrepancy sampling; and Christer Ericson, who had a number of suggestions for
improving our kd-tree implementation. For the second edition, we’re
thankful to Christophe Hery and Eugene d’Eon for helping us with the
nuances of subsurface scattering.
</p>
<p>For the third edition, we’d especially like to thank
Leo Grünschloß for reviewing our sampling chapter;
Alexander Keller for suggestions about topics for that chapter;
Eric Heitz for extensive help with microfacets (and reviewing our text on
that topic);
Thiago Ize for thoroughly reviewing the text on floating-point error;
Tom van Bussel for reporting a number of errors in our BSSRDF code;
Ralf Habel for reviewing our BSSRDF text;
and Toshiya Hachisuka and Anton Kaplanyan for extensive review and comments
about our light transport chapters.
Discussions with Eric Veach about floating-point round-off error and ray
tracing were extremely helpful to our development of our approach to that
topic.
We’d also like to thank
Per Christensen,
Doug Epps,
Luca Fascione,
Marcos Fajardo,
Christophe Hery,
John “Spike” Hughes,
Andrew Kensler,
Alan King,
Chris Kulla,
Morgan McGuire,
Andy Selle, and
Ingo Wald for helpful discussions, suggestions, and pointers to research.
</p>
<p>We would also like to thank the book’s reviewers, all of whom had
insightful and constructive feedback about the manuscript at various stages
of its progress. We’d particularly like to thank the reviewers who
provided feedback on both the first and second editions of the book:
Ian Ashdown,
Per Christensen,
Doug Epps,
Dan Goldman,
Eric Haines,
Erik Reinhard,
Pete Shirley,
Peter-Pike Sloan,
Greg Ward, and a host of anonymous reviewers.
For the second edition,
Janne Kontkanen,
Nelson Max,
Bill Mark,
and
Eric Tabellion
also contributed numerous helpful suggestions.
</p>
<p>Many people have contributed to not only <tt>pbrt</tt> but to our own
better understanding of rendering through bug reports, patches, and
suggestions about better implementation approaches. A few have made
particularly substantial contributions over the years—we would especially
like to thank
Solomon Boulos,
Stephen Chenney,
John Danks,
Kevin Egan,
Volodymyr Kachurovskyi,
and
Ke Xu.
</p>
<p>In addition, we would like to thank
Rachit Agrawal,
Frederick Akalin,
Mark Bolstad,
Thomas de Bodt,
Brian Budge,
Mark Colbert,
Yunjian Ding,
Tao Du,
Shaohua Fan,
Etienne Ferrier,
Nigel Fisher,
Jeppe Revall Frisvad,
Robert G. Graf,
Asbjørn Heid,
Keith Jeffery,
Greg Johnson,
Aaron Karp,
Donald Knuth,
Martin Kraus,
Murat Kurt,
Larry Lai,
Craig McNaughton,
Swaminathan Narayanan,
Anders Nilsson,
Jens Olsson,
Vincent Pegoraro,
Srinath Ravichandiran,
Sébastien Speierer,
Nils Thuerey,
Xiong Wei,
Wei-Wei Xu,
Arek Zimny,
and
Matthias Zwicker for their suggestions and bug reports.
Finally, we would like to thank the <em>LuxRender</em> developers and
the <em>LuxRender</em> community, particularly
Terrence Vergauwen,
Jean-Philippe Grimaldi, and
Asbjørn Heid;
it has been a delight to see the rendering system they have built from <tt>pbrt</tt>’s
foundation, and we have learned from reading their source code and
implementations of new rendering algorithms.
</p>
<p>Special thanks to Martin Preston and Steph Bruning from Framestore for
their help in making it possible for us to use a frame from <em>Gravity</em> (image
courtesy of Warner Bros. and Framestore), and to Joe Letteri, Dave Gouge,
and Luca Fascione from Weta Digital for their help with the frame from
<em>The Hobbit: The Battle of the Five Armies</em> (© 2014 Warner
Bros. Entertainment Inc. and Metro-Goldwyn-Mayer Pictures Inc. (US, Canada
&amp; New Line Foreign Territories), © 2014 Metro-Goldwyn-Mayer Pictures
Inc. and Warner Bros. Entertainment Inc. (all other territories)). All
Rights Reserved.
</p>
<p>
</p>
<p>
</p>
</div> <!-- col-md-10 col-lg-8 -->
<div class="col-md-1 col-lg-2">
</div> <!-- col-md-1 col-lg-2 -->
</div> <!-- row -->
<div class="row">
<div class="col-md-1 col-lg-2 leftcolumn">
<a href="#"><i class="fas fa-link h3h4marginlink"></i></a>
</div>
<div class="col-md-10 col-lg-8">
<h4>Production</h4><p>
</p>
<p>For the production of the first edition, we would also like to thank Tim
Cox (senior editor), for his willingness to take on this slightly
unorthodox project and for both his direction and patience throughout the
process. We are very grateful to Elisabeth Beller (project manager), who
has gone well beyond the call of duty for this book; her ability to keep
this complex project in control and on schedule has been remarkable, and we
particularly thank her for the measurable impact she has had on the quality
of the final result. Thanks also to Rick Camp (editorial assistant) for his
many contributions along the way. Paul Anagnostopoulos and Jacqui Scarlott
at Windfall Software did the book’s composition; their ability to take the
authors’ homebrew literate programming file format and turn it into
high-quality final output while also juggling the multiple unusual types of
indexing we asked for is greatly appreciated. Thanks also to Ken DellaPenta
(copyeditor) and Jennifer McClain (proofreader) as well as to Max Spector
at Chen Design (text and cover designer), and Steve Rath (indexer).
</p>
<p>For the second edition, we’d like to thank Greg Chalson who talked us into
expanding and updating the book; Greg also ensured that Paul
Anagnostopoulos at Windfall Software would again do the book’s
composition.
We’d like to thank Paul again for his efforts in working with this book’s
production complexity. Finally, we’d also like to thank Todd Green, Paul
Gottehrer, and Heather Scherer at Elsevier.
</p>
<p>For the third edition, we’d like to thank Todd Green from Elsevier, who
oversaw this go-round, and Amy Invernizzi, who kept the train on the rails
throughout the process. We were delighted to have Paul Anagnostopoulos at
Windfall Software part of this process for a third time; his efforts have
been critical to the book’s high production value, which is so important
to us.
</p>
<p>
</p>
<p>
</p>
</div> <!-- col-md-10 col-lg-8 -->
<div class="col-md-1 col-lg-2">
</div> <!-- col-md-1 col-lg-2 -->
</div> <!-- row -->
<div class="row">
<div class="col-md-1 col-lg-2 leftcolumn">
<a href="#"><i class="fas fa-link h3h4marginlink"></i></a>
</div>
<div class="col-md-10 col-lg-8">
<h4>Scenes and Models</h4><p>
</p>
<p>Many people and organizations have generously supplied us with scenes
and models for use in this book and the <tt>pbrt</tt> distribution. Their
generosity has been invaluable in helping us create interesting example
images throughout the text.
</p>
<p>The bunny, Buddha, and dragon models are courtesy of the Stanford Computer
Graphics Laboratory’s scanning repository. The “killeroo” model is included with
permission of Phil Dench and Martin Rezard (3D scan and digital
representations by headus, design and clay sculpt by Rezard). The dragon
model scan used in Chapters <a href="Reflection_Models.html#chap:reflection-models">8</a> and <a href="Materials.html#chap:materials">9</a> is courtesy of Christian Schüller,
and thanks to Yasutoshi Mori for the sports car used in Chapters <a href="Sampling_and_Reconstruction.html#chap:sampling-reconstruction">7</a>
and <a href="Light_Sources.html#chap:lights">12</a>. The glass used to illustrate caustics in Figures 16.9 and 16.11 is
thanks to Simon Wendsche, and the physically accurate smoke data sets were
created by Duc Nguyen and Ron Fedkiw.
</p>
<p>The head model used to illustrate subsurface scattering was made available
by Infinite Realities, Inc. under a Creative Commons Attribution 3.0
license. Thanks to “Wig42” for the breakfast table scene used in
Figure 16.8 and “guismo” for the coffee splash scene used in Figure 15.5;
both were posted to <a href="http://blendswap.com">blendswap.com</a> also under a Creative Commons
Attribution 3.0 license.
</p>
<p>Nolan Goodnight created environment maps with a realistic skylight model,
and Paul Debevec provided numerous high-dynamic-range environment maps.
Thanks also to Bernhard Vogl (<a href="http://dativ.at/lightprobes/">dativ.at/lightprobes/</a>) for
environment maps that we used in numerous figures.
Marc Ellens provided spectral data for a variety of light sources, and the
spectral RGB measurement data for a variety of displays is courtesy of Tom
Lianza at X-Rite.
</p>
<p>We are most particularly grateful to Guillermo M. Leal Llaguno of Evolución
Visual, <a href="http://www.evvisual.com">www.evvisual.com</a>, who modeled and rendered the San Miguel
scene that was featured on the cover of the second edition and is still
used in numerous figures in the book. We would also especially like to
thank Marko Dabrovic (<a href="http://www.3lhd.com">www.3lhd.com</a>) and Mihovil Odak at RNA Studios
(<a href="http://www.rna.hr">www.rna.hr</a>), who supplied a bounty of excellent models and scenes,
including the Sponza atrium, the Sibenik cathedral, and the Audi TT car
model. Many thanks are also due to Florent Boyer, who provided the contemporary house scene
used in some of the images in Chapter <a href="Light_Transport_III_Bidirectional_Methods.html#chap:bidir-methods">16</a>.
</p>
<p>
</p>
<p>
</p>
<p>
</p>
</div> <!-- col-md-10 col-lg-8 -->
<div class="col-md-1 col-lg-2">
</div> <!-- col-md-1 col-lg-2 -->
</div> <!-- row -->
<div class="row">
<div class="col-md-1 col-lg-2 leftcolumn">
</div>
<div class="col-md-10 col-lg-8">
<h3>About The Cover</h3><p>
</p>
<p>The “Countryside” scene on the cover of the book was created by
Jan-Walter Schliep, Burak Kahraman, and Timm Dapper of Laubwerk
(<a href="http://www.laubwerk.com">www.laubwerk.com</a>). The scene features 23,241 individual plants,
with a total of 3.1 billion triangles. (Thanks to object instancing, only
24 million triangles need to be stored in memory.) The <tt>pbrt</tt> files that
describe the scene geometry require 1.1 GB of on-disk storage. There are a
total of 192 texture maps, representing 528 MB of texture data. The scene
is one of the example scenes that are available from the <tt>pbrt</tt> Web site.
</p>
<p>
</p>
<p>
</p>
<p></p>
</div> <!-- col-md-10 col-lg-8 -->
<div class="col-md-1 col-lg-2">
</div> <!-- col-md-1 col-lg-2 -->
</div> <!-- row -->
</div> <!-- container-fluid -->
</div> <!-- maincontainer -->
<nav class="navbar navbar-expand-md">
<div class="container-fluid-nav">
<!-- <ul class="nav navbar-nav navbar-center"> -->
<span class="navbar-text" style="text-align: center;">
Thanks to Aras Pranckevicius and 34 others for generously supporting <i>Physically
Based Rendering</i> online
through <a href="https://patreon.com/pbrbook">Patreon</a>.
</span>
</div>
</nav>
<nav class="navbar navbar-expand-md">
<span class="navbar-text"><i>Physically Based Rendering: From Theory To Implementation</i>, © 2004-2021 Matt Pharr, Wenzel Jakob, and Greg Humphreys under the <a href="https://creativecommons.org/licenses/by-nc-nd/4.0/">CC BY-NC-ND 4.0</a> license. (<a href="https://github.com/mmp/pbr-book-website/">github</a>)</span>
<div class="container">
<ul class="nav navbar-nav ml-auto">
<li class="nav-item">Next: <a href="Preface/Further_Reading.html">Preface / Further Reading</a></li>
</ul>
</div>
</nav>
<script src="https://code.jquery.com/jquery-3.3.1.slim.min.js" integrity="sha384-q8i/X+965DzO0rT7abK41JStQIAqVgRVzpbzo5smXKp4YfRvH+8abtTE1Pi6jizo" crossorigin="anonymous"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.14.3/umd/popper.min.js" integrity="sha384-ZMP7rVo3mIykV+2+9J3UJ46jBk0WLaUAdn689aCwoqbBJiSnjAK/l8WvCWPIPm49" crossorigin="anonymous"></script>
<script>
$(function () {
$('[data-toggle="popover"]').popover()
$('[data-toggle="tooltip"]').tooltip()
})
</script>
<script src="https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/js/bootstrap.min.js" integrity="sha384-ChfqqxuZUCnJSK3+MXmPNIyE6ZbWh2IMqE241rYiqJxyMiZ6OW/JmZQ5stwEULTy" crossorigin="anonymous"></script>
<script>
// https://stackoverflow.com/a/17535094
// The function actually applying the offset
function offsetAnchor() {
if (location.hash.length !== 0) {
window.scrollTo(window.scrollX, window.scrollY - window.innerHeight / 8);
}
}
// Captures click events of all <a> elements with href starting with #
$(document).on('click', 'a[href^="#"]', function(event) {
// Click events are captured before hashchanges. Timeout
// causes offsetAnchor to be called after the page jump.
window.setTimeout(function() {
offsetAnchor();
}, 500);
});
// Set the offset when entering page with hash present in the url
window.setTimeout(offsetAnchor, 1500);
</script>
</body>
</html>