This article collects typical usage examples of the Java class bdv.spimdata.SpimDataMinimal. If you are wondering what the SpimDataMinimal class does, how to use it, or what real-world code that uses it looks like, the curated examples below should help.
The SpimDataMinimal class belongs to the bdv.spimdata package. Twenty code examples of the class are shown below, ordered by popularity.
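Before the examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the pattern most of them share: loading a SpimDataMinimal from a BigDataViewer dataset XML via XmlIoSpimDataMinimal and inspecting its SequenceDescriptionMinimal. The XML path and the class name SpimDataMinimalSketch are placeholder assumptions.

import bdv.spimdata.SequenceDescriptionMinimal;
import bdv.spimdata.SpimDataMinimal;
import bdv.spimdata.XmlIoSpimDataMinimal;
import mpicbg.spim.data.SpimDataException;

public class SpimDataMinimalSketch
{
    public static void main( final String[] args ) throws SpimDataException
    {
        // Placeholder path to a BigDataViewer dataset XML (adjust to your data).
        final String xmlPath = "/path/to/dataset.xml";
        // Load the minimal SpimData representation: image loader, timepoints, view setups, registrations.
        final SpimDataMinimal spimData = new XmlIoSpimDataMinimal().load( xmlPath );
        // Inspect the sequence description, as most of the examples below do.
        final SequenceDescriptionMinimal seq = spimData.getSequenceDescription();
        System.out.println( "timepoints: " + seq.getTimePoints().size() );
        System.out.println( "view setups: " + seq.getViewSetupsOrdered().size() );
    }
}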
Example 1: load
import bdv.spimdata.SpimDataMinimal; // import the dependent package/class
@SuppressWarnings( "unchecked" )
@Override
public SpimDataMinimal[] load()
{
    SpimDataMinimal spimData = null;
    try
    {
        spimData = new XmlIoSpimDataMinimal().load( xmlPath );
        if ( WrapBasicImgLoader.wrapImgLoaderIfNecessary( spimData ) )
        {
            System.err.println( "WARNING:\nOpening <SpimData> dataset that is not suited for interactive browsing.\nConsider resaving as HDF5 for better performance." );
        }
    }
    catch ( final SpimDataException e )
    {
        e.printStackTrace();
    }
    return new SpimDataMinimal[]{ spimData };
}
Developer: saalfeldlab | Project: bigwarp | Lines: 23 | Source: XMLLoader.java
Example 2: CellHandler
import bdv.spimdata.SpimDataMinimal; // import the dependent package/class
public CellHandler( final String baseUrl, final String xmlFilename, final String datasetName, final String thumbnailsDirectory ) throws SpimDataException, IOException
{
    final XmlIoSpimDataMinimal io = new XmlIoSpimDataMinimal();
    final SpimDataMinimal spimData = io.load( xmlFilename );
    final SequenceDescriptionMinimal seq = spimData.getSequenceDescription();
    final Hdf5ImageLoader imgLoader = ( Hdf5ImageLoader ) seq.getImgLoader();
    cache = imgLoader.getCacheControl();
    loader = imgLoader.getShortArrayLoader();
    cacheHints = new CacheHints( LoadingStrategy.BLOCKING, 0, false );
    // dataSetURL property is used for providing the XML file by replacing
    // SequenceDescription>ImageLoader>baseUrl
    this.xmlFilename = xmlFilename;
    baseFilename = xmlFilename.endsWith( ".xml" ) ? xmlFilename.substring( 0, xmlFilename.length() - ".xml".length() ) : xmlFilename;
    dataSetURL = baseUrl;
    datasetXmlString = buildRemoteDatasetXML( io, spimData, baseUrl );
    metadataJson = buildMetadataJsonString( imgLoader, seq );
    settingsXmlString = buildSettingsXML( baseFilename );
    thumbnailFilename = createThumbnail( spimData, baseFilename, datasetName, thumbnailsDirectory );
}
Developer: bigdataviewer | Project: bigdataviewer-server | Lines: 23 | Source: CellHandler.java
Example 3: createThumbnail
import bdv.spimdata.SpimDataMinimal; // import the dependent package/class
/**
 * Create PNG thumbnail file named "{@code <baseFilename>.png}".
 */
private static String createThumbnail( final SpimDataMinimal spimData, final String baseFilename, final String datasetName, final String thumbnailsDirectory )
{
    final String thumbnailFileName = thumbnailsDirectory + "/" + datasetName + ".png";
    final File thumbnailFile = new File( thumbnailFileName );
    if ( !thumbnailFile.isFile() ) // do not recreate thumbnail if it already exists
    {
        final BufferedImage bi = ThumbnailGenerator.makeThumbnail( spimData, baseFilename, Constants.THUMBNAIL_WIDTH, Constants.THUMBNAIL_HEIGHT );
        try
        {
            ImageIO.write( bi, "png", thumbnailFile );
        }
        catch ( final IOException e )
        {
            LOG.warn( "Could not create thumbnail png for dataset \"" + baseFilename + "\"" );
            LOG.warn( e.getMessage() );
        }
    }
    return thumbnailFileName;
}
Developer: bigdataviewer | Project: bigdataviewer-server | Lines: 23 | Source: CellHandler.java
Example 4: exec
import bdv.spimdata.SpimDataMinimal; // import the dependent package/class
public void exec( final String xmlHDF5Path, final int setupID, final String tgmmPath, final String outputPath, final boolean doCrop, final RealInterval interval, final int tFrom, final int tTo )
{
    SpimDataMinimal spimData;
    try
    {
        spimData = new XmlIoSpimDataMinimal().load( xmlHDF5Path );
    }
    catch ( final SpimDataException e )
    {
        logger.error( "Problem reading the transforms in image data file:\n" + e.getMessage() + "\n" );
        return;
    }
    final Model model = createModel( new File( tgmmPath ), spimData, setupID, interval, tFrom, tTo );
    model.setLogger( logger );
    final Settings settings = createSettings( new File( xmlHDF5Path ) );
    final TrackMate trackmate = new TrackMate( model, settings );
    trackmate.setNumThreads( 1 );
    trackmate.computeSpotFeatures( true );
    trackmate.computeEdgeFeatures( true );
    trackmate.computeTrackFeatures( true );
    save( outputPath, model, settings );
}
Developer: fiji | Project: MaMuT | Lines: 25 | Source: ImportTGMMAnnotationPlugin_.java
Example 5: run
import bdv.spimdata.SpimDataMinimal; // import the dependent package/class
@Override
public void run( final String ignored )
{
    final File imageFile = askForImageFile();
    if ( null == imageFile ) { return; }
    final File tgmmFolder = askForTGMMFolder();
    if ( null == tgmmFolder ) { return; }
    SpimDataMinimal spimData;
    try
    {
        spimData = new XmlIoSpimDataMinimal().load( imageFile.getAbsolutePath() );
    }
    catch ( final SpimDataException e )
    {
        logger.error( "Problem reading the transforms in image data file:\n" + e.getMessage() + "\n" );
        return;
    }
    final String[] angles = readSetupNames( spimData.getSequenceDescription() );
    final int angleIndex = askForAngle( angles );
    if ( angleIndex < 0 ) { return; }
    final int setupID = spimData.getSequenceDescription().getViewSetupsOrdered().get( angleIndex ).getId();
    launchMamut( imageFile, tgmmFolder, setupID, interval );
}
Developer: fiji | Project: MaMuT | Lines: 27 | Source: LoadTGMMAnnotationPlugIn.java
Example 6: launchMamut
import bdv.spimdata.SpimDataMinimal; // import the dependent package/class
public void launchMamut( final File imageFile, final File tgmmFile, final int setupID, final RealInterval interval )
{
    SpimDataMinimal spimData;
    try
    {
        spimData = new XmlIoSpimDataMinimal().load( imageFile.getAbsolutePath() );
    }
    catch ( final SpimDataException e )
    {
        logger.error( "Problem reading the transforms in image data file:\n" + e.getMessage() + "\n" );
        return;
    }
    final Model model = createModel( tgmmFile, spimData, setupID, interval );
    final SourceSettings settings = createSettings();
    new MaMuT( imageFile, model, settings );
}
Developer: fiji | Project: MaMuT | Lines: 17 | Source: LoadTGMMAnnotationPlugIn.java
Example 7: createModel
import bdv.spimdata.SpimDataMinimal; // import the dependent package/class
protected Model createModel( final File tgmmFolder, final SpimDataMinimal spimData, final int setupID, final RealInterval interval )
{
    final List< AffineTransform3D > transforms = pickTransform( spimData, setupID );
    final TGMMImporter2 importer = new TGMMImporter2( tgmmFolder, transforms, TGMMImporter2.DEFAULT_PATTERN, logger, interval, 0, Integer.MAX_VALUE );
    if ( !importer.checkInput() || !importer.process() )
    {
        logger.error( importer.getErrorMessage() );
        return new Model();
    }
    final Model model = importer.getResult();
    /*
     * Hack to set the POSITION_T feature of imported spots.
     */
    final Settings settings = new Settings();
    settings.dt = 1;
    final TrackMate trackmate = new TrackMate( model, settings );
    final ResetSpotTimeFeatureAction action = new ResetSpotTimeFeatureAction();
    action.execute( trackmate );
    return model;
}
Developer: fiji | Project: MaMuT | Lines: 24 | Source: LoadTGMMAnnotationPlugIn.java
Example 8: fix
import bdv.spimdata.SpimDataMinimal; // import the dependent package/class
public static void fix( final String xmlFilename ) throws SpimDataException, IOException
{
    final XmlIoSpimDataMinimal spimDataIo = new XmlIoSpimDataMinimal();
    final SpimDataMinimal spimData = spimDataIo.load( xmlFilename );
    final SequenceDescriptionMinimal seq = spimData.getSequenceDescription();
    final Hdf5ImageLoader il = ( Hdf5ImageLoader ) seq.getImgLoader();
    final String outfn = il.getHdf5File().getCanonicalPath() + "FIXED";
    final HashMap< Integer, ExportMipmapInfo > perSetupMipmapInfo = new HashMap<>();
    for ( final BasicViewSetup setup : seq.getViewSetupsOrdered() )
    {
        final int setupId = setup.getId();
        final MipmapInfo info = il.getSetupImgLoader( setupId ).getMipmapInfo();
        perSetupMipmapInfo.put( setupId, new ExportMipmapInfo(
                Util.castToInts( info.getResolutions() ),
                info.getSubdivisions() ) );
    }
    final ArrayList< Partition > partitions = il.getPartitions();
    WriteSequenceToHdf5.writeHdf5PartitionLinkFile( seq, perSetupMipmapInfo, partitions, new File( outfn ) );
    System.out.println( "fixed hdf5 master file written to " + outfn );
    System.out.println( "rename it to " + il.getHdf5File().getCanonicalPath() + " to use it." );
}
Developer: bigdataviewer | Project: bigdataviewer_fiji | Lines: 23 | Source: FixAbsolutePathsInHdf5Partitions.java
Example 9: loadAll
import bdv.spimdata.SpimDataMinimal; // import the dependent package/class
public SpimDataMinimal[] loadAll( int startid )
{
    SpimDataMinimal[] out = new SpimDataMinimal[ impList.length ];
    index = startid;
    for( int i = 0; i < impList.length; i++ )
    {
        out[ i ] = load( range( index, impList[ i ].getNChannels() ), impList[ i ] );
        index += impList[ i ].getNChannels();
    }
    return out;
}
Developer: saalfeldlab | Project: bigwarp | Lines: 12 | Source: ImagePlusLoader.java
Example 10: buildRemoteDatasetXML
import bdv.spimdata.SpimDataMinimal; // import the dependent package/class
/**
 * Create a modified dataset XML by replacing the ImageLoader with an
 * {@link RemoteImageLoader} pointing to the data we are serving.
 */
private static String buildRemoteDatasetXML( final XmlIoSpimDataMinimal io, final SpimDataMinimal spimData, final String baseUrl ) throws IOException, SpimDataException
{
    final SpimDataMinimal s = new SpimDataMinimal( spimData, new RemoteImageLoader( baseUrl, false ) );
    final Document doc = new Document( io.toXml( s, s.getBasePath() ) );
    final XMLOutputter xout = new XMLOutputter( Format.getPrettyFormat() );
    final StringWriter sw = new StringWriter();
    xout.output( doc, sw );
    return sw.toString();
}
Developer: bigdataviewer | Project: bigdataviewer-server | Lines: 14 | Source: CellHandler.java
Example 11: createModel
import bdv.spimdata.SpimDataMinimal; // import the dependent package/class
protected Model createModel( final File tgmmFolder, final SpimDataMinimal spimData, final int setupID, final RealInterval interval, final int tFrom, final int tTo )
{
    final SequenceDescriptionMinimal seq = spimData.getSequenceDescription();
    final ViewRegistrations regs = spimData.getViewRegistrations();
    final List< AffineTransform3D > transforms = new ArrayList< AffineTransform3D >( seq.getTimePoints().size() );
    for ( final TimePoint t : seq.getTimePoints().getTimePointsOrdered() )
    {
        transforms.add( regs.getViewRegistration( t.getId(), setupID ).getModel() );
    }
    final TGMMImporter2 importer = new TGMMImporter2( tgmmFolder, transforms, TGMMImporter2.DEFAULT_PATTERN, logger, interval, tFrom, tTo );
    if ( !importer.checkInput() || !importer.process() )
    {
        logger.error( importer.getErrorMessage() );
    }
    final Model model = importer.getResult();
    /*
     * Hack to set the POSITION_T feature of imported spots.
     */
    final Settings settings = new Settings();
    settings.dt = 1;
    final TrackMate trackmate = new TrackMate( model, settings );
    final ResetSpotTimeFeatureAction action = new ResetSpotTimeFeatureAction();
    action.execute( trackmate );
    return model;
}
Developer: fiji | Project: MaMuT | Lines: 30 | Source: ImportTGMMAnnotationPlugin_.java
Example 12: pickTransform
import bdv.spimdata.SpimDataMinimal; // import the dependent package/class
protected List< AffineTransform3D > pickTransform( final SpimDataMinimal spimData, final int setupID )
{
    final SequenceDescriptionMinimal seq = spimData.getSequenceDescription();
    final ViewRegistrations regs = spimData.getViewRegistrations();
    final List< AffineTransform3D > transforms = new ArrayList< AffineTransform3D >( seq.getTimePoints().size() );
    for ( final TimePoint t : seq.getTimePoints().getTimePointsOrdered() )
    {
        transforms.add( regs.getViewRegistration( t.getId(), setupID ).getModel() );
    }
    return transforms;
}
Developer: fiji | Project: MaMuT | Lines: 12 | Source: LoadTGMMAnnotationPlugIn.java
Example 13: openSequence
import bdv.spimdata.SpimDataMinimal; // import the dependent package/class
private static SequenceDescriptionMinimal openSequence( final String xmlFilename ) throws SpimDataException
{
    final File f = new File( xmlFilename );
    if ( f.exists() && f.isFile() && f.getName().endsWith( ".xml" ) )
    {
        final SpimDataMinimal spimData = new XmlIoSpimDataMinimal().load( xmlFilename );
        return spimData.getSequenceDescription();
    }
    else
        return null;
}
Developer: bigdataviewer | Project: bigdataviewer_fiji | Lines: 12 | Source: ImportPlugIn.java
Example 14: writeXmlAndLinks
import bdv.spimdata.SpimDataMinimal; // import the dependent package/class
public void writeXmlAndLinks() throws SpimDataException
{
    final SequenceDescriptionMinimal seq = spimData.getSequenceDescription();
    WriteSequenceToHdf5.writeHdf5PartitionLinkFile( seq, perSetupMipmapInfo, partitions, hdf5File );
    final Hdf5ImageLoader loader = new Hdf5ImageLoader( hdf5File, partitions, null, false );
    new XmlIoSpimDataMinimal().save( new SpimDataMinimal( spimData, loader ), seqFile.getAbsolutePath() );
}
Developer: bigdataviewer | Project: bigdataviewer_fiji | Lines: 9 | Source: Scripting.java
Example 15: createSpimData
import bdv.spimdata.SpimDataMinimal; // import the dependent package/class
public static final SpimDataMinimal createSpimData( IFormatReader reader )
{
    Hashtable< String, Object > gmeta = reader.getGlobalMetadata();
    System.out.println( gmeta ); // header stuff here TODO
    // get relevant metadata
    double pw = 1.0;
    double ph = 1.0;
    double pd = 1.0;
    if( gmeta.keySet().contains( "XResolution" ))
        pw = ((Double)gmeta.get("XResolution")).doubleValue();
    if( gmeta.keySet().contains( "YResolution" ))
        ph = ((Double)gmeta.get("YResolution")).doubleValue();
    if( gmeta.keySet().contains( "Spacing" ))
        pd = ((Double)gmeta.get("Spacing")).doubleValue();
    int numSetups = 1;
    int numTimepoints = 1;
    int[] ids = new int[]{ 349812342 };
    final File basePath = new File( "." );
    String punit = "px";
    if( gmeta.keySet().contains( "Unit" ))
        punit = (String) gmeta.get( "Unit" );
    final FinalVoxelDimensions voxelSize = new FinalVoxelDimensions( punit, pw, ph, pd );
    final long w = ((Long)gmeta.get("ImageWidth")).longValue();
    final long h = ((Long)gmeta.get("ImageLength")).longValue();
    final long d = reader.getSizeZ(); //Long.parseLong( (String)gmeta.get("images") );
    long[] dims = new long[]{ w, h, d };
    final FinalDimensions size = new FinalDimensions( new long[] { w, h, d } );
    // create setups from channels
    final HashMap< Integer, BasicViewSetup > setups = new HashMap< Integer, BasicViewSetup >( numSetups );
    for ( int s = 0; s < numSetups; ++s )
    {
        final BasicViewSetup setup = new BasicViewSetup( ids[ s ], String.format( "channel %d", ids[ s ] + 1 ), size, voxelSize );
        setup.setAttribute( new Channel( ids[ s ] + 1 ) );
        setups.put( ids[ s ], setup );
    }
    // create timepoints
    final ArrayList< TimePoint > timepoints = new ArrayList< TimePoint >( numTimepoints );
    for ( int t = 0; t < numTimepoints; ++t )
        timepoints.add( new TimePoint( t ) );
    // create ViewRegistrations from the images calibration
    final AffineTransform3D sourceTransform = new AffineTransform3D();
    sourceTransform.set( 1.0/pw, 0, 0, 0, 0, 1.0/ph, 0, 0, 0, 0, pd, 0 );
    final ArrayList< ViewRegistration > registrations = new ArrayList< ViewRegistration >();
    for ( int t = 0; t < numTimepoints; ++t )
        for ( int s = 0; s < numSetups; ++s )
            registrations.add( new ViewRegistration( t, ids[ s ], sourceTransform ) );
    final SequenceDescriptionMinimal seq = new SequenceDescriptionMinimal( new TimePoints( timepoints ), setups,
            new DummyImageLoader< FloatType >( new FloatType(), dims ), null );
    SpimDataMinimal spimData = new SpimDataMinimal( basePath, seq, new ViewRegistrations( registrations ) );
    if ( WrapBasicImgLoader.wrapImgLoaderIfNecessary( spimData ) )
        System.err.println( "WARNING:\nOpening <SpimData> dataset that is not suited for interactive browsing.\nConsider resaving as HDF5 for better performance." );
    return spimData;
}
Developer: saalfeldlab | Project: bigwarp | Lines: 68 | Source: BigWarpBatchTransform.java
Example 16: load
import bdv.spimdata.SpimDataMinimal; // import the dependent package/class
@SuppressWarnings( "unchecked" )
@Override
public SpimDataMinimal[] load()
{
    return loadAll( 255 );
}
Developer: saalfeldlab | Project: bigwarp | Lines: 7 | Source: ImagePlusLoader.java
Example 17: createSpimData
import bdv.spimdata.SpimDataMinimal; // import the dependent package/class
public final SpimDataMinimal createSpimData()
{
    int numSetups = 1;
    int numTimepoints = 1;
    int[] ids = new int[]{ 349812342 };
    final File basePath = new File( "." );
    double pw = spacingFull[ 0 ];
    double ph = spacingFull[ 1 ];
    double pd = spacingFull[ 2 ];
    double ox = offsetFull[ 0 ] / spacingFull[ 0 ];
    double oy = offsetFull[ 1 ] / spacingFull[ 1 ];
    double oz = offsetFull[ 2 ] / spacingFull[ 2 ];
    String punit = "px";
    final FinalVoxelDimensions voxelSize = new FinalVoxelDimensions( punit, pw, ph, pd );
    final long w = dimsFull[ 0 ];
    final long h = dimsFull[ 1 ];
    final long d = dimsFull[ 2 ];
    final FinalDimensions size = new FinalDimensions( new long[] { w, h, d } );
    // create setups from channels
    final HashMap< Integer, BasicViewSetup > setups = new HashMap< Integer, BasicViewSetup >( numSetups );
    for ( int s = 0; s < numSetups; ++s )
    {
        final BasicViewSetup setup = new BasicViewSetup( ids[ s ], String.format( "channel %d", ids[ s ] + 1 ), size, voxelSize );
        setup.setAttribute( new Channel( ids[ s ] + 1 ) );
        setups.put( ids[ s ], setup );
    }
    // create timepoints
    final ArrayList< TimePoint > timepoints = new ArrayList< TimePoint >( numTimepoints );
    for ( int t = 0; t < numTimepoints; ++t )
        timepoints.add( new TimePoint( t ) );
    // create ViewRegistrations from the images calibration
    final AffineTransform3D sourceTransform = new AffineTransform3D();
    sourceTransform.set( pw, 0, 0, ox, 0, ph, 0, oy, 0, 0, pd, oz );
    final ArrayList< ViewRegistration > registrations = new ArrayList< ViewRegistration >();
    for ( int t = 0; t < numTimepoints; ++t )
        for ( int s = 0; s < numSetups; ++s )
            registrations.add( new ViewRegistration( t, ids[ s ], sourceTransform ) );
    final SequenceDescriptionMinimal seq = new SequenceDescriptionMinimal( new TimePoints( timepoints ), setups,
            new DummyImageLoader< FloatType >( new FloatType(), this ), null );
    SpimDataMinimal spimData = new SpimDataMinimal( basePath, seq, new ViewRegistrations( registrations ) );
    if ( WrapBasicImgLoader.wrapImgLoaderIfNecessary( spimData ) )
        System.err.println( "WARNING:\nOpening <SpimData> dataset that is not suited for interactive browsing.\nConsider resaving as HDF5 for better performance." );
    return spimData;
}
Developer: saalfeldlab | Project: bigwarp | Lines: 55 | Source: BigWarpBatchTransformFOV.java
Example 18: makeThumbnail
import bdv.spimdata.SpimDataMinimal; // import the dependent package/class
/**
 * Create a thumbnail image for a dataset. If there is a settings.xml file
 * for the dataset, these settings are used for creating the thumbnail.
 *
 * @param spimData
 *            the dataset.
 * @param baseFilename
 *            full path of dataset xml file, without the ".xml" extension.
 *            this is used to derive the name of the settings.xml file.
 * @param width
 *            width of the thumbnail image.
 * @param height
 *            height of the thumbnail image.
 * @return thumbnail image
 */
public static BufferedImage makeThumbnail( final SpimDataMinimal spimData, final String baseFilename, final int width, final int height )
{
    final ArrayList< ConverterSetup > converterSetups = new ArrayList< ConverterSetup >();
    final ArrayList< SourceAndConverter< ? > > sources = new ArrayList< SourceAndConverter< ? > >();
    BigDataViewer.initSetups( spimData, converterSetups, sources );
    final int numTimepoints = spimData.getSequenceDescription().getTimePoints().size();
    final ThumbnailGenerator generator = new ThumbnailGenerator( sources, numTimepoints );
    final ViewerState state = generator.state;
    final SetupAssignments setupAssignments = new SetupAssignments( converterSetups, 0, 65535 );
    final AffineTransform3D initTransform = InitializeViewerState.initTransform( width, height, false, state );
    state.setViewerTransform( initTransform );
    if ( !generator.tryLoadSettings( baseFilename, setupAssignments ) )
        InitializeViewerState.initBrightness( 0.001, 0.999, state, setupAssignments );
    class ThumbnailTarget implements RenderTarget
    {
        BufferedImage bi;
        @Override
        public BufferedImage setBufferedImage( final BufferedImage bufferedImage )
        {
            bi = bufferedImage;
            return null;
        }
        @Override
        public int getWidth()
        {
            return width;
        }
        @Override
        public int getHeight()
        {
            return height;
        }
    }
    final ThumbnailTarget renderTarget = new ThumbnailTarget();
    new MultiResolutionRenderer( renderTarget, new PainterThread( null ), new double[] { 1 }, 0, false, 1, null, false, AccumulateProjectorARGB.factory, new CacheControl.Dummy() ).paint( state );
    return renderTarget.bi;
}
Developer: bigdataviewer | Project: bigdataviewer-server | Lines: 60 | Source: ThumbnailGenerator.java
Example 19: run
import bdv.spimdata.SpimDataMinimal; // import the dependent package/class
@Override
public void run()
{
    if ( ij.Prefs.setIJMenuBar )
        System.setProperty( "apple.laf.useScreenMenuBar", "true" );
    final Parameters params = getParameters();
    // cancelled
    if ( params == null )
        return;
    final ProgressWriter progress = new ProgressWriterIJ();
    progress.out().println( "starting export..." );
    final SpimRegistrationSequence sequence = new SpimRegistrationSequence( params.conf );
    final SequenceDescriptionMinimal desc = sequence.getSequenceDescription();
    Map< Integer, ExportMipmapInfo > perSetupExportMipmapInfo;
    if ( params.setMipmapManual )
    {
        perSetupExportMipmapInfo = new HashMap<>();
        final ExportMipmapInfo mipmapInfo = new ExportMipmapInfo( params.resolutions, params.subdivisions );
        for ( final BasicViewSetup setup : desc.getViewSetupsOrdered() )
            perSetupExportMipmapInfo.put( setup.getId(), mipmapInfo );
    }
    else
    {
        perSetupExportMipmapInfo = ProposeMipmaps.proposeMipmaps( desc );
    }
    final ArrayList< Partition > partitions;
    final int numCellCreatorThreads = Math.max( 1, PluginHelper.numThreads() - 1 );
    if ( params.split )
    {
        final String xmlFilename = params.seqFile.getAbsolutePath();
        final String basename = xmlFilename.endsWith( ".xml" ) ? xmlFilename.substring( 0, xmlFilename.length() - 4 ) : xmlFilename;
        final List< TimePoint > timepoints = desc.getTimePoints().getTimePointsOrdered();
        final List< BasicViewSetup > setups = desc.getViewSetupsOrdered();
        partitions = Partition.split( timepoints, setups, params.timepointsPerPartition, params.setupsPerPartition, basename );
        for ( int i = 0; i < partitions.size(); ++i )
        {
            final Partition partition = partitions.get( i );
            final ProgressWriter p = new SubTaskProgressWriter( progress, 0, 0.95 * i / partitions.size() );
            WriteSequenceToHdf5.writeHdf5PartitionFile( desc, perSetupExportMipmapInfo, params.deflate, partition, null, null, numCellCreatorThreads, p );
        }
        WriteSequenceToHdf5.writeHdf5PartitionLinkFile( desc, perSetupExportMipmapInfo, partitions, params.hdf5File );
    }
    else
    {
        partitions = null;
        WriteSequenceToHdf5.writeHdf5File( desc, perSetupExportMipmapInfo, params.deflate, params.hdf5File, null, null, numCellCreatorThreads, new SubTaskProgressWriter( progress, 0, 0.95 ) );
    }
    final Hdf5ImageLoader loader = new Hdf5ImageLoader( params.hdf5File, partitions, null, false );
    final SequenceDescriptionMinimal sequenceDescription = new SequenceDescriptionMinimal( desc, loader );
    final File basePath = params.seqFile.getParentFile();
    final SpimDataMinimal spimData = new SpimDataMinimal( basePath, sequenceDescription, sequence.getViewRegistrations() );
    try
    {
        new XmlIoSpimDataMinimal().save( spimData, params.seqFile.getAbsolutePath() );
        progress.setProgress( 1.0 );
    }
    catch ( final Exception e )
    {
        progress.err().println( "Failed to write xml file " + params.seqFile );
        e.printStackTrace( progress.err() );
    }
    progress.out().println( "done" );
}
Developer: bigdataviewer | Project: bigdataviewer_fiji | Lines: 72 | Source: ExportSpimSequencePlugIn.java
Example 20: saveAsNewFile
import bdv.spimdata.SpimDataMinimal; // import the dependent package/class
public static void saveAsNewFile( final Parameters params ) throws SpimDataException
{
    final ProgressWriter progress = new ProgressWriterIJ();
    final XmlIoSpimDataMinimal spimDataIo = new XmlIoSpimDataMinimal();
    final SpimRegistrationSequence spimseq = new SpimRegistrationSequence( params.conf );
    final Map< Integer, AffineTransform3D > fusionTransforms = spimseq.getFusionTransforms( params.cropOffsetX, params.cropOffsetY, params.cropOffsetZ, params.scale );
    final FusionResult fusionResult = FusionResult.create( spimseq, params.fusionDirectory, params.filenamePattern, params.numSlices, params.sliceValueMin, params.sliceValueMax, fusionTransforms );
    // sequence description (no ImgLoader yet)
    final SequenceDescriptionMinimal desc = fusionResult.getSequenceDescription();
    // create ExportMipmapInfos
    final Map< Integer, ExportMipmapInfo > perSetupExportMipmapInfo = new HashMap<>();
    final ExportMipmapInfo mipmapInfo = new ExportMipmapInfo( params.resolutions, params.subdivisions );
    for ( final BasicViewSetup setup : desc.getViewSetupsOrdered() )
        perSetupExportMipmapInfo.put( setup.getId(), mipmapInfo );
    // create partitions if desired
    final ArrayList< Partition > partitions;
    if ( params.split )
    {
        final String xmlFilename = params.seqFile.getAbsolutePath();
        final String basename = xmlFilename.endsWith( ".xml" ) ? xmlFilename.substring( 0, xmlFilename.length() - 4 ) : xmlFilename;
        final List< TimePoint > timepoints = desc.getTimePoints().getTimePointsOrdered();
        final List< BasicViewSetup > setups = desc.getViewSetupsOrdered();
        partitions = Partition.split( timepoints, setups, params.timepointsPerPartition, params.setupsPerPartition, basename );
    }
    else
        partitions = null;
    // write to hdf5
    final int numCellCreatorThreads = Math.max( 1, PluginHelper.numThreads() - 1 );
    if ( params.split )
    {
        for ( int i = 0; i < partitions.size(); ++i )
        {
            final Partition partition = partitions.get( i );
            final ProgressWriter p = new SubTaskProgressWriter( progress, 0, 0.95 * i / partitions.size() );
            WriteSequenceToHdf5.writeHdf5PartitionFile( desc, perSetupExportMipmapInfo, params.deflate, partition, null, null, numCellCreatorThreads, p );
        }
        WriteSequenceToHdf5.writeHdf5PartitionLinkFile( desc, perSetupExportMipmapInfo, partitions, params.hdf5File );
    }
    else
    {
        WriteSequenceToHdf5.writeHdf5File( desc, perSetupExportMipmapInfo, params.deflate, params.hdf5File, null, null, numCellCreatorThreads, new SubTaskProgressWriter( progress, 0, 0.95 ) );
    }
    // write xml file
    final Hdf5ImageLoader loader = new Hdf5ImageLoader( params.hdf5File, partitions, null, false );
    final SequenceDescriptionMinimal sequenceDescription = new SequenceDescriptionMinimal( desc, loader );
    final File basePath = params.seqFile.getParentFile();
    final SpimDataMinimal spimData = new SpimDataMinimal( basePath, sequenceDescription, fusionResult.getViewRegistrations() );
    try
    {
        spimDataIo.save( spimData, params.seqFile.getAbsolutePath() );
        progress.setProgress( 1.0 );
    }
    catch ( final Exception e )
    {
        progress.err().println( "Failed to write xml file " + params.seqFile );
        e.printStackTrace( progress.err() );
    }
}
Developer: bigdataviewer | Project: bigdataviewer_fiji | Lines: 66 | Source: ExportSpimFusionPlugIn.java
Note: the bdv.spimdata.SpimDataMinimal examples in this article were collected from open-source projects hosted on platforms such as GitHub and MSDocs. The snippets come from projects contributed by open-source authors; copyright remains with the original authors, and any redistribution or use should follow the license of the corresponding project. Please do not repost without permission.