SHOW:
|
|
- or go back to the newest paste.
1 | - | import nuke |
1 | + | import math |
2 | ||
3 | ||
4 | def createExrCamVray( node ): | |
5 | ''' | |
6 | Create a camera node based on VRay metadata. | |
7 | This works specifically on VRay data coming from maya. | |
8 | ''' | |
9 | ||
10 | #Big thanks to Ivan Busquets who helped me put this together! | |
11 | #(ok, ok, he really helped me a lot) | |
12 | #Also thanks to Nathan Dunsworth for giving me solid ideas and some code to get me started. | |
13 | ||
14 | ### TODO : add progress bar (even though it's really not needed here) that works | |
15 | ||
16 | mDat = node.metadata() | |
17 | reqFields = ['exr/camera%s' % i for i in ('FocalLength', 'Aperture', 'Transform')] | |
18 | if not set( reqFields ).issubset( mDat ): | |
1 19 | print 'no metadata for camera found' | |
20 | return | |
21 | ||
22 | - | task = nuke.ProgressTask( 'Baking camera from meta data in %s' % node.name() ) |
22 | + | |
23 | last = node.lastFrame() | |
24 | ret = nuke.getFramesAndViews( 'Create Camera from Metadata', '%s-%s' %( first, last ) ) | |
25 | - | if task.isCancelled(): |
25 | + | |
26 | - | break |
26 | + | |
27 | - | task.setMessage( 'processing frame %s' % frame ) |
27 | + | |
28 | cam['useMatrix'].setValue( False ) | |
29 | - | val = node.metadata( 'exr/cameraFocalLength') |
29 | + | |
30 | for k in ( 'focal', 'haperture', 'translate', 'rotate'): | |
31 | cam[k].setAnimated() | |
32 | ||
33 | - | cam['focal'].setValueAt(float(val),frame) |
33 | + | #task = nuke.ProgressTask( 'Baking camera from meta data in %s' % node.name() ) |
34 | ||
35 | - | val = node.metadata( 'exr/cameraAperture') |
35 | + | |
36 | - | tx = node.metadata('exr/cameraTransform')[12] |
36 | + | #if task.isCancelled(): |
37 | - | tx = node.metadata('exr/cameraTransform')[13] |
37 | + | #nuke.executeInMainThread( nuke.message, args=( "Phew!", ) ) |
38 | - | tx = node.metadata('exr/cameraTransform')[14] |
38 | + | #break; |
39 | #task.setMessage( 'processing frame %s' % frame ) | |
40 | ||
41 | - | cam['translate'].setValueAt(float(tx),frame,0) |
41 | + | |
42 | - | cam['translate'].setValueAt(float(tx),frame,1) |
42 | + | # IB. If you get both focal and aperture as they are in the metadata, there's no guarantee |
43 | - | cam['translate'].setValueAt(float(tx),frame,2) |
43 | + | # your Nuke camera will have the same FOV as the one that rendered the scene (because the render could have been fit to horizontal, to vertical, etc) |
44 | # Nuke always fits to the horizontal aperture. If you set the horizontal aperture as it is in the metadata, | |
45 | - | # CONVERT STRING BACK TO LIST OBJECT AND ASSIGN |
45 | + | # then you should use the FOV in the metadata to figure out the correct focal length for Nuke's camera |
46 | - | matrixList = node.metadata('exr/cameraTransform') |
46 | + | # Or, you could keep the focal as is in the metadata, and change the horizontal_aperture instead. |
47 | - | #for i, v in enumerate( matrixList ): |
47 | + | # I'll go with the former here. Set the haperture knob as per the metadata, and derive the focal length from the FOV |
48 | - | # cam[ 'matrix' ].setValueAt( v, frame, i) |
48 | + | |
49 | - | # UPDATE PROGRESS BAR |
49 | + | val = node.metadata( 'exr/cameraAperture', frame) # get horizontal aperture |
50 | - | task.setProgress( int( float(curTask) / fRange.frames() *100) ) |
50 | + | fov = node.metadata( 'exr/cameraFov', frame) # get camera FOV |
51 | ||
52 | focal = val / (2 * math.tan(math.radians(fov)/2.0)) # convert the fov and aperture into focal length | |
53 | ||
54 | cam['focal'].setValueAt(float(focal),frame) | |
55 | cam['haperture'].setValueAt(float(val),frame) | |
56 | ||
57 | matrixCamera = node.metadata( 'exr/cameraTransform', frame) # get camera transform data | |
58 | ||
59 | #Create a matrix to shove the original data into | |
60 | matrixCreated = nuke.math.Matrix4() | |
61 | ||
62 | for k,v in enumerate(matrixCamera): | |
63 | matrixCreated[k] = v | |
64 | ||
65 | matrixCreated.rotateX(math.radians(-90)) # this is needed for VRay. It's a counter clockwise rotation | |
66 | translate = matrixCreated.transform(nuke.math.Vector3(0,0,0)) # Get a vector that represents the camera translation | |
67 | rotate = matrixCreated.rotationsZXY() # give us xyz rotations from cam matrix (must be converted to degrees) | |
68 | ||
69 | cam['translate'].setValueAt(float(translate.x),frame,0) | |
70 | cam['translate'].setValueAt(float(translate.y),frame,1) | |
71 | cam['translate'].setValueAt(float(translate.z),frame,2) | |
72 | cam['rotate'].setValueAt(float(math.degrees(rotate[0])),frame,0) | |
73 | cam['rotate'].setValueAt(float(math.degrees(rotate[1])),frame,1) | |
74 | cam['rotate'].setValueAt(float(math.degrees(rotate[2])),frame,2) | |
75 | ||
76 | # task.setProgress( int( float(curTask) / fRange.frames() *100) ) |