Skip to content
Snippets Groups Projects
Commit 348561fe authored by Brian Wandell's avatar Brian Wandell
Browse files

Setting up a demonstration of how to render on the google cloud platform (GCP).

parent 88122d44
No related branches found
No related tags found
No related merge requests found
function [ scene, mappings ] = MexximpRemodellerMultipleObj( scene, mappings, names, conditionValues, conditionNumber )
% Remodeler applied to the mexximp scene once per condition.
%
% Typically pointed to by
%   hints.batchRenderStrategy.remodelPerConditionAfterFunction
%
% For each condition (row of the conditions file) this function:
%   1. Places the camera from the 'position', 'lookAt' and 'PTR'
%      (pan/tilt/roll, degrees) condition values.
%   2. Translates/rotates scene objects according to the JSON arrangement
%      file named by the 'objPosFile' condition value.
%   3. Appends a directional light named 'SunLight' pointing along the
%      'shadowDirection' condition value.
%
% At this point conditionNumber is not used but it might be used later for
% information.
%
% HB, SCIEN Stanford, 2017
%% PROGRAMMING TODO
% The factors of 1000 below convert positions into millimeters (the scene
% appears to be modeled in mm) — annoying; we need a general units fix.
% Where the light is coming from (direction vector, scene units)
shadowDirection = rtbGetNamedNumericValue(names,conditionValues,'shadowDirection',[]);
% Camera position in meters (converted to mm below)
cameraPosition = rtbGetNamedNumericValue(names,conditionValues,'position',[]);
% Point the camera looks at, in meters
cameraLookAt = rtbGetNamedNumericValue(names,conditionValues,'lookAt',[]);
% Pan/tilt/roll in degrees; defaults to no rotation
cameraPTR = rtbGetNamedNumericValue(names,conditionValues,'PTR',[0 0 0]);
% JSON file describing per-object prefix/position/orientation
objMovementFile = rtbGetNamedValue(names,conditionValues,'objPosFile','');
%% Add a camera
scene = mexximpCentralizeCamera(scene);
lookUp = [0 0 -1];
cameraLookDir = cameraLookAt - cameraPosition;
cameraPTR = deg2rad(cameraPTR);
% Look-at transform built in millimeters (hence the 1000 scale)
transformation = mexximpLookAt(1000*cameraPosition,1000*cameraLookAt,lookUp);
ptrTransform = mexximpPTR(cameraPTR(1), cameraPTR(2), cameraPTR(3), cameraLookDir, lookUp);
cameraId = strcmp({scene.rootNode.children.name},'Camera');
% Apply the pan/tilt/roll about the camera position: translate the camera
% to the origin, rotate, translate back, then apply the look-at transform.
scene.rootNode.children(cameraId).transformation = ...
transformation*mexximpTranslate(-1000*cameraPosition)*ptrTransform*mexximpTranslate(1000*cameraPosition);
%% Translate the objects
objects = loadjson(objMovementFile,'SimplifyCell',1);
% For every scene node whose name contains an object's prefix, compose the
% requested rotation (about -z, degrees) and translation (meters -> mm)
% onto the node's existing transformation.
for i=1:length(scene.rootNode.children)
for o=1:length(objects)
if isempty(strfind(scene.rootNode.children(i).name,objects(o).prefix)) == false
position = objects(o).position*1000;
orientation = objects(o).orientation;
scene.rootNode.children(i).transformation = scene.rootNode.children(i).transformation*...
mexximpRotate([0 0 -1],deg2rad(orientation))*mexximpTranslate(position);
end
end
end
% Add a directional light (named 'SunLight') plus a scene node for it.
% The very large color values compensate for the mm unit scaling above.
ambient = mexximpConstants('light');
ambient.position = [0 0 0]';
ambient.type = 'directional';
ambient.name = 'SunLight';
ambient.lookAtDirection = shadowDirection(:);
ambient.ambientColor = 10000*[1 1 1]';
ambient.diffuseColor = 10000*[1 1 1]';
ambient.specularColor = 10000*[1 1 1]';
ambient.constantAttenuation = 1;
ambient.linearAttenuation = 0;
ambient.quadraticAttenuation = 1;
ambient.innerConeAngle = 0;
ambient.outerConeAngle = 0;
scene.lights = [scene.lights, ambient];
ambientNode = mexximpConstants('node');
ambientNode.name = ambient.name;
ambientNode.transformation = eye(4);
scene.rootNode.children = [scene.rootNode.children, ambientNode];
end
function [ nativeScene ] = PBRTRemodeller( parentScene, nativeScene, mappings, names, conditionValues, conditionNumbers )
% Attaches PBRT-specific constructs to the PBRT scene.
%
% We start with an assimp scene, then take the assimp representation and
% add (or change) the PBRT representations to create the new scene in
% proper PBRT format.  For example, assimp has no notion of spectrum, so
% we add the spectral information here.
%
% Used in
%   hints.batchRenderStrategy.converter.remodelAfterMappingsFunction
%
% The condition values control the camera model ('perspective', 'pinhole',
% 'lightfield', or a lens name) and the render mode ('depth', 'material',
% 'mesh', or radiance by default).
%
% HB

%% Read per-condition parameters
cameraType = rtbGetNamedValue(names,conditionValues,'cameraType',[]);
lensType = rtbGetNamedValue(names,conditionValues,'lensType',[]);
mode = rtbGetNamedValue(names,conditionValues,'mode',[]);
pixelSamples = rtbGetNamedNumericValue(names,conditionValues,'pixelSamples',[]);
filmDist = rtbGetNamedNumericValue(names,conditionValues,'filmDist',[]);
filmDiag = rtbGetNamedNumericValue(names,conditionValues,'filmDiag',[]);
microlensDim = rtbGetNamedNumericValue(names,conditionValues,'microlensDim',[0, 0]);
fNumber = rtbGetNamedNumericValue(names,conditionValues,'fNumber',[]);
fog = 0;  % Fog rendering is disabled; see the radiance case below.
diffraction = rtbGetNamedValue(names,conditionValues,'diffraction','true');
chromaticAberration = rtbGetNamedValue(names,conditionValues,'chromaticAberration','false');

%{
scale = MPbrtElement('Scale');
scale.identifier = 'Scale';
scale.valueType='raw';
scale.value = [-1 1 1];
nativeScene.overall.prepend(scale);
%}

%% Lights: give the sun a D65 spectrum and add an environment map
light = nativeScene.world.find('LightSource','name','1_SunLight');
light.setParameter('L','spectrum','resources/D65.spd');

environmentLight = MPbrtElement('LightSource','type','infinite');
environmentLight.setParameter('nsamples','integer',32);
environmentLight.setParameter('mapname','string','resources/sky_lightblueFixed_ud.exr');
environmentLight.setParameter('scale','color',1000*[1 1 1]);
nativeScene.world.append(environmentLight);

%% Camera
% Depending on the camera type we may need to set different parameters.
% Any type other than 'perspective' or 'pinhole' (e.g. 'lightfield' or a
% lens model) uses the realisticDiffraction camera; only 'lightfield'
% places a nonzero grid of microlens pinholes.
switch cameraType
    case 'perspective'
        camera = nativeScene.overall.find('Camera');
        camera.setParameter('fov','float',35);
    case 'pinhole'
        camera = nativeScene.overall.find('Camera');
        camera.parameters = [];
        camera.type = 'pinhole';
        camera.setParameter('filmdiag','float',filmDiag);
        camera.setParameter('filmdistance','float',filmDist);
    otherwise
        if strcmp(cameraType,'lightfield')
            pinholes = microlensDim;
        else
            pinholes = [0, 0];
        end
        % The focal length (mm) is embedded in the lens file name, e.g.
        % 'dgauss.50.0mm' -> between the second '.' and the 'mm' suffix.
        pos = strfind(lensType,'.');
        pos2 = strfind(lensType,'mm');
        fLength = str2double(lensType(pos(2)+1:pos2(1)-1));
        camera = nativeScene.overall.find('Camera');
        camera.type = 'realisticDiffraction';
        camera.parameters = [];
        camera.setParameter('aperture_diameter','float',fLength/fNumber);
        camera.setParameter('filmdiag','float',filmDiag);
        camera.setParameter('filmdistance','float',filmDist);
        camera.setParameter('num_pinholes_h','float',pinholes(1));
        camera.setParameter('num_pinholes_w','float',pinholes(2));
        camera.setParameter('microlens_enabled','float',0);
        camera.setParameter('diffractionEnabled','bool',diffraction);
        camera.setParameter('chromaticAberrationEnabled','bool',chromaticAberration);
        camera.setParameter('specfile','string',sprintf('resources/%s.dat',lensType));
end

%% Integrator, sampler and pixel filter, chosen by render mode
integrator = nativeScene.overall.find('SurfaceIntegrator');
sampler = nativeScene.overall.find('Sampler');
pixelFilter = nativeScene.overall.find('PixelFilter');  % 'pixelFilter' avoids shadowing MATLAB's filter()
switch mode
    case {'depth','material','mesh'}
        % Metadata renders share one configuration: the mode name is
        % exactly the metadata strategy, with one unjittered sample per
        % pixel and a small box filter so labels are not blended.
        integrator.type = 'metadata';
        integrator.parameters = [];
        integrator.setParameter('strategy','string',mode);
        sampler.type = 'stratified';
        sampler.parameters = [];
        sampler.setParameter('jitter','bool','false');
        sampler.setParameter('xsamples','integer',1);
        sampler.setParameter('ysamples','integer',1);
        sampler.setParameter('pixelsamples','integer',1);
        pixelFilter.type = 'box';
        pixelFilter.parameters = [];
        pixelFilter.setParameter('xwidth','float',0.5);
        pixelFilter.setParameter('ywidth','float',0.5);
    otherwise % Generate radiance data
        if fog == true
            % Replace the surface integrator with a single-scattering
            % volume integrator and a water volume acting as fog.
            nativeScene.overall.find('SurfaceIntegrator','remove',true);
            volumeIntegrator = MPbrtElement('VolumeIntegrator','type','single');
            volumeIntegrator.setParameter('stepsize','float',50);
            nativeScene.overall.append(volumeIntegrator);
            fogVolume = MPbrtElement('Volume','type','water');
            fogVolume.setParameter('p0','point','-100000 -100000 -10000');
            fogVolume.setParameter('p1','point','100000 100000 100000');
            fogVolume.setParameter('absorptionCurveFile','spectrum','resources/abs_fog.spd');
            fogVolume.setParameter('phaseFunctionFile','string','resources/phase_fog.spd');
            fogVolume.setParameter('scatteringCurveFile','spectrum','resources/scat_fog.spd');
            nativeScene.world.append(fogVolume);
        else
            integrator.type = 'path';
            sampler.setParameter('pixelsamples','integer',pixelSamples);
        end
        % Chromatic aberration needs the spectral renderer (not available
        % for the pinhole camera model).
        if (strcmp(chromaticAberration,'true') == 1) && (strcmp(cameraType,'pinhole') == 0)
            renderer = MPbrtElement('Renderer','type','spectralrenderer');
            nativeScene.overall.append(renderer);
        end
end
end
%% Compare lens renderings of different cars
%
% Demonstration of rendering on the Google Cloud Platform (GCP) using
% RenderToolbox4, mexximp, and ISET.
%
% BW, Henryk Blasinski, SCIEN Stanford, 2017

%% Scene description

% Initialize ISET related variables
ieInit;

% Sets up variables related to the car renderings and the local directory
% tree (maybe should be called nnDirectories).
% NOTE(review): lensDir, used below, is presumably defined here — confirm.
nnConstants;

tokenPath = '/home/wandell/gcloud/primalsurfer-token.json'; % Path to a storage admin access key
gcloud = true;

% Different labs might want to use the GCP in different zones.
% The defaults are set for the Wandell lab resources. But to change the zone
% you could use this:
%   zone = 'us-west1-b';
% and then set a 'zone' parameter below.
% Should have a validity check. Surprising that we have the tokenPath early in
% the ordering within this nnHintsInit routine.

% Small image size for debugging.
hints = nnHintsInit('imageWidth',160,'imageHeight',120,...
    'recipeName','cloud-example',...
    'tokenPath',tokenPath,...
    'gcloud',gcloud,...
    'mexximpRemodeler', @MexximpRemodellerMultipleObj);

%% Open the GCP
rtbCloudInit(hints);

%% Full path to the object we are going to render
sceneFile = which('millenium-falcon.obj');

% Camera set to be 50 meters from an object distance.
% This could be an array of cameras.
cameras = nnGenCameras('type',{'pinhole'},...
    'mode',{'radiance'},...
    'distance',50);

% Set up the work space
resourceFolder = rtbWorkingFolder('folderName','resources',...
    'rendererSpecific',false,...
    'hints',hints);

% Copy all the lens files for all the cameras into the work space
for ii=1:length(cameras)
    lensFile = fullfile(lensDir,strcat(cameras(ii).lens,'.dat'));
    copyfile(lensFile,resourceFolder);
end

% Use ISET: get the D65 spectrum in photons and write it to the work space
wave = 400:10:700;
il = illuminantCreate('D65',wave);
d65 = illuminantGet(il,'photons');
rtbWriteSpectrumFile(wave,d65,fullfile(resourceFolder,'D65.spd'));

%% Build the scene
% dragonFile = which('Dragon.blend');
% dragonScene = mexximpCleanImport(dragonFile,...
%     'ignoreRootTransform',true,...
%     'flipUVs',true,...
%     'imagemagicImage','hblasins/imagemagic-docker',...
%     'toReplace',{'jpg','png','tga'},...
%     'options','-gamma 0.45',...
%     'targetFormat','exr',...
%     'makeLeftHanded',true,...
%     'flipWindingOrder',true,...
%     'workingFolder',resourceFolder);

% Import the Millennium Falcon, which is small
mfScene = mexximpCleanImport(sceneFile,...
    'ignoreRootTransform',true,...
    'flipUVs',true,...
    'imagemagicImage','hblasins/imagemagic-docker',...
    'toReplace',{'jpg','png','tga'},...
    'options','-gamma 0.45',...
    'targetFormat','exr',...
    'makeLeftHanded',true,...
    'flipWindingOrder',true,...
    'workingFolder',resourceFolder);

%% Initiate two poses
objects(2).prefix = ''; % Note that spaces or : are not allowed
objects(2).position = [10 10 0];
objects(2).orientation = 30;
objects(2).bndbox = mat2str(mexximpSceneBox(mfScene));
objects(1) = objects(2);
objects(1).orientation = 60;
objectArrangements = {objects(1), objects(2)};

% For each fixed configuration of the objects, we render a series of images for
% different camera properties. For example, this function sets particularly the
% position, lookAt and film distance variables. Other slots are copied from the
% camera object itself. The placedCameras combine the different object
% arrangements and cameras. The output is placedCameras{nCameras}(nArrangements).
placedCameras = nnPlaceCameras(cameras,objectArrangements);

%% Make values used for the Conditions file.
%
% Parameters are placed in a struct that will be gridded for the conditions.
% These are the variable names used in the conditionsFile.  See
% https://github.com/RenderToolbox/RenderToolbox4/wiki/Conditions-File-
% Some of these are standard. Some are selected here.
conditionsFile = fullfile(resourceFolder,'Conditions.txt');

% Column layout: imageName | one column per camera field | objPosFile
names = cat(1,'imageName',fieldnames(placedCameras{1}),'objPosFile');
values = cell(1,length(names));

cntr = 1;       % Row counter into values
sceneId = 1;    % Incremented once per radiance rendering
for m=1:length(objectArrangements)
    % Create the name for the JSON file describing this arrangement and save it
    objectArrangementFile = fullfile(resourceFolder,sprintf('Arrangement_%i.json',m));
    savejson('',objectArrangements{m},objectArrangementFile);

    currentCameras = placedCameras{m}; % An array of cameras

    % At this point, we set up the parameters for the remodeler.  The
    % remodeler name is in
    % hints.batchRenderStrategy.remodelPerConditionAfterFunction
    for c=1:length(placedCameras{m})
        % The scene output file name. Mode determines the type, from
        % radiance, mesh, depth ...
        fName = sprintf('%03i_%s',sceneId,currentCameras(c).mode);
        values(cntr,1) = {fName};
        % Middle columns: one per camera field
        for i=2:(length(names)-1)
            values(cntr,i) = {currentCameras(c).(names{i})};
        end
        % Last column is the arrangement file.  (Fixed: writing it at
        % length(names)-1 overwrote the last camera field and left the
        % objPosFile column empty.)
        values(cntr,length(names)) = {objectArrangementFile};
        if strcmp(currentCameras(c).mode,'radiance')
            sceneId = sceneId+1;
        end
        cntr = cntr + 1;
    end
end
rtbWriteConditionsFile(conditionsFile,names,values);
% edit(conditionsFile);

%% Generate files and render
% We parallelize scene generation, not the rendering, because PBRT
% automatically scales the number of processes to equal the number of cores.
%
% Fixed: this previously passed the undefined variable 'scene'; the scene
% imported above is mfScene.
nativeSceneFiles = rtbMakeSceneFiles(mfScene, 'hints', hints,...
    'conditionsFile',conditionsFile);

fprintf('Uploading data to gcloud\n');
rtbCloudUpload(hints, nativeSceneFiles);
fprintf('Data uploaded\n');

%%
fprintf('Batch rendering %d files\n',length(nativeSceneFiles));
rtbBatchRender(nativeSceneFiles, 'hints', hints);
fprintf('Jobs initiated\n');

% Poll the cloud until the rendered radiance data appear.
radianceDataFiles = [];
while isempty(radianceDataFiles)
    radianceDataFiles = rtbCloudDownload(hints);
    pause(10);
end

% We aren't saving the radianceDataFiles for all the conditions.
% This means we have to rerun too many times.
%
% Also, we don't have the true irradiance level, just a noise-free
% irradiance. So, we should aim to set the irradiance to a reasonable
% level here.
%
% load('radianceDataFiles');

%%
fprintf('Creating OI\n');
% NOTE(review): this loop appears to be copied from a multi-lens script.
% lensType, lt, fov, fNumber, filmDistanceVec, and meanIlluminance are not
% defined anywhere above, so as written this section will error — TODO:
% define them (or derive them from the 'cameras' array) before running.
for i=1:length(radianceDataFiles)
    % chdir(fullfile(nnGenRootPath,'local'));
    % save('radianceDataFiles','radianceDataFiles');
    radianceData = load(radianceDataFiles{i});

    % Create an oi and set the parameters
    clear oiParams;
    oiParams.optics_name = lensType{lt};
    oiParams.optics_model = 'diffractionlimited';
    oiParams.fov = fov;
    switch ieParamFormat(lensType{lt})
        case 'pinhole'
            oiParams.optics_fnumber = 999;
        otherwise
            oiParams.optics_fnumber = fNumber(lt);
    end
    oiParams.optics_focalLength = filmDistanceVec(lt)*1e-3; % In meters

    [~, label] = fileparts(radianceDataFiles{i});
    oiParams.name = label;

    oi = buildOi(radianceData.multispectralImage, [], oiParams);
    oi = oiAdjustIlluminance(oi,meanIlluminance);

    ieAddObject(oi);
    oiWindow;
end

%% Save out the oi if you like
if 0
    chdir(fullfile(nnGenRootPath,'local','tmp'));
    oiNames = vcGetObjectNames('oi');
    for ii=1:length(oiNames)
        thisOI = ieGetObject('oi',ii);
        save([oiNames{ii},'.mat'],'thisOI');
    end
end

%%
if 0
    %% Experiment with different camera renderings
    oi = ieGetObject('oi');
    fov = oiGet(oi,'fov');
    oi = oiAdjustIlluminance(oi,10); % The illuminance values are very small

    % Big sensor
    sensor = sensorCreate;
    sensor = sensorSet(sensor,'fov',fov);
    sensor = sensorCompute(sensor,oi);
    ieAddObject(sensor); sensorWindow;

    ip = ipCreate;
    ip = ipCompute(ip,sensor);
    ieAddObject(ip); ipWindow;
end
%%
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment