本文整理汇总了Python中tardis.tardis_portal.models.Dataset_File类的典型用法代码示例。如果您正苦于以下问题:Python Dataset_File类的具体用法?Python Dataset_File怎么用?Python Dataset_File使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了Dataset_File类的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: clone
def clone(cls, oldInstance, newDescription, username):
    """Duplicate *oldInstance* under *newDescription*, copying its
    parameters (minus the excluded ones) and staging copies of its
    uploaded files for *username*.

    Returns the new (unsaved-parameters-applied) instance.
    """
    import os
    import shutil

    copy = cls(description=newDescription,
               experiment_id=oldInstance.dataset.experiment.id)
    # Carry over every parameter that is not explicitly excluded.
    for param in oldInstance.parameters:
        if param.name.name in cls.doNotCopyParams:
            continue
        if param.name.isNumeric():
            copied_value = param.numerical_value
        else:
            copied_value = param.string_value
        copy.new_param(param.name.name, copied_value)
    # Stage a copy of each uploaded file, except job files.
    staging_dir = get_full_staging_path(username)
    for fname in oldInstance.get_params("uploaded_file", value=True):
        if fname[-8:] == ".jobfile":
            continue
        source = Dataset_File.objects.get(dataset=oldInstance.dataset,
                                          filename=fname)
        shutil.copy(source.get_absolute_filepath(), staging_dir)
        staged = Dataset_File(
            dataset=copy.dataset,
            url=os.path.join(staging_dir, fname),
            protocol="staging",
            mimetype=source.mimetype,
        )
        staged.save()
    return copy
开发者ID:aaryani,项目名称:aa-migration-test1,代码行数:29,代码来源:task.py
示例2: add_datafile_to_dataset
def add_datafile_to_dataset(dataset, filepath, size):
    """
    Adds datafile metadata to a dataset.

    :param dataset: dataset whose directory is to be written to
    :type dataset: :class:`tardis.tardis_portal.models.Dataset`
    :param filepath: The full os path to the file
    :type filepath: string
    :param size: The file size in bytes
    :type size: string
    :returns: the newly saved datafile object
    :rtype: Dataset_File
    """
    experiment_path = path.join(settings.FILE_STORE_PATH,
                                str(dataset.experiment.id))
    # The url is the path relative to the experiment's store directory.
    urlpath = 'file:/' + filepath[len(experiment_path):]
    # Filename is the last component of the url.
    filename = urlpath.rpartition('/')[2]
    datafile = Dataset_File(dataset=dataset, filename=filename,
                            url=urlpath, size=size, protocol='')
    datafile.save()
    return datafile
开发者ID:grischa,项目名称:mytardis-mrtardis,代码行数:25,代码来源:staging.py
示例3: _create_test_dataset
def _create_test_dataset(nosDatafiles):
    """Create and save a Dataset holding *nosDatafiles* dummy datafiles."""
    dataset = Dataset(description='happy snaps of plumage')
    dataset.save()
    for _ in range(0, nosDatafiles):
        datafile = Dataset_File(
            dataset=dataset,
            url='http://planet-python.org/' + str(_next_id()))
        datafile.save()
    dataset.save()
    return dataset
开发者ID:JasonBoyka,项目名称:mytardis,代码行数:8,代码来源:test_rmexperiment.py
示例4: _make_dataset
def _make_dataset(self, exp, filenames):
    """Create a dataset under *exp* with one file:// datafile per name,
    each pointing into the sibling 'data' directory."""
    ds = Dataset(experiment=exp)
    ds.save()
    for name in filenames:
        datafile = Dataset_File(dataset=ds, size=41, protocol='file')
        datafile.filename = name
        datafile.url = 'file://' + path.join(path.dirname(__file__),
                                             'data', datafile.filename)
        datafile.save()
开发者ID:conkiztador,项目名称:mytardis,代码行数:8,代码来源:test_integrity.py
示例5: setUp
def setUp(self):
    """Create one public and one non-public experiment, each with a
    dataset containing a single on-disk test file, for download tests."""
    # create a test user
    self.user = User.objects.create_user(username='DownloadTestUser',
                                         email='',
                                         password='secret')
    # create a public experiment
    self.experiment1 = Experiment(title='Experiment 1',
                                  created_by=self.user,
                                  public=True)
    self.experiment1.save()
    # create a non-public experiment
    self.experiment2 = Experiment(title='Experiment 2',
                                  created_by=self.user,
                                  public=False)
    self.experiment2.save()
    # dataset1 belongs to experiment1
    self.dataset1 = Dataset(experiment=self.experiment1)
    self.dataset1.save()
    # dataset2 belongs to experiment2
    self.dataset2 = Dataset(experiment=self.experiment2)
    self.dataset2.save()
    # absolute path first
    filename = 'testfile.txt'
    # Per-experiment store directories under FILE_STORE_PATH, keyed by
    # experiment id.
    self.dest1 = abspath(join(settings.FILE_STORE_PATH, '%s'
                              % self.experiment1.id))
    self.dest2 = abspath(join(settings.FILE_STORE_PATH, '%s'
                              % self.experiment2.id))
    if not exists(self.dest1):
        mkdir(self.dest1)
    if not exists(self.dest2):
        mkdir(self.dest2)
    # Write an identical small payload into each experiment's store.
    testfile1 = abspath(join(self.dest1, filename))
    f = open(testfile1, 'w')
    f.write("Hello World!\n")
    f.close()
    testfile2 = abspath(join(self.dest2, filename))
    f = open(testfile2, 'w')
    f.write("Hello World!\n")
    f.close()
    # Register both files with the 'tardis' protocol so downloads
    # resolve them relative to the experiment store.
    self.dataset_file1 = Dataset_File(dataset=self.dataset1,
                                      filename=filename,
                                      protocol='tardis',
                                      url='tardis://%s' % filename)
    self.dataset_file1.save()
    self.dataset_file2 = Dataset_File(dataset=self.dataset2,
                                      filename=basename(filename),
                                      protocol='tardis',
                                      url='tardis://%s' % filename)
    self.dataset_file2.save()
开发者ID:grischa,项目名称:mytardis-mrtardis,代码行数:58,代码来源:test_download.py
示例6: _build
def _build(dataset, filename, url, protocol):
    """Create and save a Dataset_File together with a Replica stored at
    the default location; return the datafile."""
    from tardis.tardis_portal.models import \
        Dataset_File, Replica, Location
    df = Dataset_File(dataset=dataset, filename=filename)
    df.save()
    rep = Replica(datafile=df, url=url,
                  protocol=protocol,
                  location=Location.get_default_location())
    rep.save()
    return df
开发者ID:cvl-em-apm,项目名称:mytardis,代码行数:10,代码来源:test_models.py
示例7: add_staged_file_to_dataset
def add_staged_file_to_dataset(rel_filepath, dataset_id, username,
                               mimetype="application/octet-stream"):
    """
    Move a file from the user's staging area into the main file store
    and register it against the dataset.

    :param rel_filepath: file path relative to the staging root
    :param dataset_id: primary key of the target Dataset
    :param username: owner of the staging area holding the file
    :param mimetype: MIME type recorded on the new datafile
    :returns: the saved Dataset_File
    """
    originfilepath = os.path.join(get_full_staging_path(username),
                                  rel_filepath)
    dataset = Dataset.objects.get(pk=dataset_id)
    newDatafile = Dataset_File()
    newDatafile.dataset = dataset
    newDatafile.size = os.path.getsize(originfilepath)
    newDatafile.protocol = "tardis"
    newDatafile.mimetype = mimetype
    # Store layout: <FILE_STORE_PATH>/<experiment id>/<dataset id>/<file>.
    # file_dir is built by plain concatenation (not os.path.join) because
    # its length is used below to slice the relative part out of
    # full_file_path.
    file_dir = "/" + str(dataset.experiment.id) + "/" + str(dataset.id) + "/"
    file_path = file_dir + rel_filepath
    prelim_full_file_path = settings.FILE_STORE_PATH + file_path
    # Rename on collision rather than clobbering an existing file.
    full_file_path = duplicate_file_check_rename(prelim_full_file_path)
    newDatafile.filename = os.path.basename(full_file_path)
    newDatafile.url = "%s://%s" % (newDatafile.protocol,
                                   full_file_path[
                                       len(settings.FILE_STORE_PATH) +
                                       len(file_dir):])
    if not os.path.exists(os.path.dirname(full_file_path)):
        os.makedirs(os.path.dirname(full_file_path))
    shutil.move(originfilepath, full_file_path)
    newDatafile.save()
    # Return the new datafile, consistent with add_datafile_to_dataset().
    return newDatafile
开发者ID:aaryani,项目名称:aa-migration-test1,代码行数:26,代码来源:utils.py
示例8: _create_test_dataset
def _create_test_dataset(nosDatafiles):
    """Create a Dataset with *nosDatafiles* bogus datafiles, each backed
    by a Replica at the default location."""
    dataset = Dataset(description='happy snaps of plumage')
    dataset.save()
    for _ in range(0, nosDatafiles):
        datafile = Dataset_File(dataset=dataset, size='21',
                                sha512sum='bogus')
        datafile.save()
        replica = Replica(
            datafile=datafile,
            url='http://planet-python.org/' + str(_next_id()),
            location=Location.get_default_location())
        replica.save()
    dataset.save()
    return dataset
开发者ID:TheGoodRob,项目名称:mytardis,代码行数:12,代码来源:test_rmexperiment.py
示例9: create_staging_datafile
def create_staging_datafile(filepath, username, dataset_id):
    """Register a file from *username*'s staging area as a datafile of
    the given dataset, with a replica at the 'staging' location."""
    dataset = Dataset.objects.get(id=dataset_id)
    url, size = get_staging_url_and_size(username, filepath)
    df = Dataset_File(dataset=dataset,
                      filename=path.basename(filepath),
                      size=size)
    rep = Replica(datafile=df,
                  protocol='staging',
                  url=url,
                  location=Location.get_location('staging'))
    # Verify (tolerating empty checksums) before the datafile is saved.
    rep.verify(allowEmptyChecksums=True)
    df.save()
    # Re-attach so the replica picks up the saved datafile's primary key.
    rep.datafile = df
    rep.save()
开发者ID:crawley,项目名称:mytardis,代码行数:15,代码来源:tasks.py
示例10: create_datafile
def create_datafile(index):
    """Register fixture file jeol_sem_test<index>.txt as a verified
    datafile of the enclosing dataset and return it."""
    fixture = path.join(path.dirname(__file__), 'fixtures',
                        'jeol_sem_test%d.txt' % index)
    size, sha512sum = get_size_and_sha512sum(fixture)
    df = Dataset_File(dataset=dataset,
                      filename=path.basename(fixture),
                      url='file://' + path.abspath(fixture),
                      protocol='file',
                      size=size,
                      sha512sum=sha512sum)
    df.verify()
    df.save()
    return df
开发者ID:JMSS-IT-11-2012,项目名称:mytardis,代码行数:15,代码来源:test_jeolsem.py
示例11: _make_data_file
def _make_data_file(dataset, filename, content):
# TODO:
# create datasetfile
f = mktemp()
print "Inside make data file ", f
open(f, "w+b").write(content)
df = Dataset_File()
df.dataset = dataset
df.filename = filename
df.url = 'file://'+f
df.protocol = "staging"
df.size = len(content)
df.verify(allowEmptyChecksums=True)
df.save()
print "Df ---", df
开发者ID:bioscience-data-platform,项目名称:mytardis_hpc_app,代码行数:16,代码来源:views.py
示例12: testRemoteFile
def testRemoteFile(self):
    """A datafile pointing at an initially-empty local file should fail
    verification until the expected content is actually written."""
    content = urandom(1024)
    with NamedTemporaryFile() as f:
        # Create new Datafile
        datafile = Dataset_File(dataset=self.dataset)
        datafile.filename = 'background_task_testfile'
        datafile.size = len(content)
        datafile.sha512sum = hashlib.sha512(content).hexdigest()
        datafile.url = 'file://' + path.abspath(f.name)
        datafile.save()

        def get_datafile(datafile):
            # Re-fetch from the DB to observe changes made by
            # verify_files().
            return Dataset_File.objects.get(id=datafile.id)

        # Check that it won't verify as it stands
        expect(get_datafile(datafile).verified).to_be(False)
        verify_files()
        expect(get_datafile(datafile).verified).to_be(False)
        expect(get_datafile(datafile).is_local()).to_be(False)
        # Fill in the content
        f.write(content)
        f.flush()
        # Check it now verifies
        verify_files()
        expect(get_datafile(datafile).verified).to_be(True)
        expect(get_datafile(datafile).is_local()).to_be(True)
开发者ID:JMSS-IT-11-2012,项目名称:mytardis,代码行数:29,代码来源:test_tasks.py
示例13: aadd_staged_file_to_dataset
def aadd_staged_file_to_dataset(rel_filepath, dataset_id, username,
                                mimetype="application/octet-stream"):
    """
    Add a file in the user's staging path to a dataset.
    May be replaced by main code functions; quick and dirty hack to get
    it working.
    """
    staged_path = os.path.join(get_full_staging_path(username),
                               rel_filepath)
    ds = Dataset.objects.get(pk=dataset_id)
    df = Dataset_File(
        dataset=ds,
        url=staged_path,
        protocol="staging",
        mimetype=mimetype,
    )
    df.save()
开发者ID:aaryani,项目名称:aa-migration-test1,代码行数:18,代码来源:utils.py
示例14: _create_datafile
def _create_datafile():
    """Build a fully public experiment/dataset containing one TIFF image
    datafile with a verified replica, for the IIIF tests; return the
    datafile."""
    user = User.objects.create_user("testuser", "[email protected]", "pwd")
    user.save()
    UserProfile(user=user).save()
    Location.force_initialize()
    full_access = Experiment.PUBLIC_ACCESS_FULL
    experiment = Experiment.objects.create(title="IIIF Test", created_by=user, public_access=full_access)
    experiment.save()
    # Grant the creating user full ownership rights over the experiment.
    ObjectACL(
        content_object=experiment,
        pluginId="django_user",
        entityId=str(user.id),
        isOwner=True,
        canRead=True,
        canWrite=True,
        canDelete=True,
        aclOwnershipType=ObjectACL.OWNER_OWNED,
    ).save()
    dataset = Dataset()
    dataset.save()
    dataset.experiments.add(experiment)
    dataset.save()
    # Create new Datafile
    tempfile = TemporaryUploadedFile("iiif_stored_file", None, None, None)
    # Render ImageMagick's built-in sample image and store it as TIFF.
    with Image(filename="magick:rose") as img:
        img.format = "tiff"
        img.save(file=tempfile.file)
    tempfile.file.flush()
    datafile = Dataset_File(dataset=dataset, size=os.path.getsize(tempfile.file.name), filename="iiif_named_file")
    replica = Replica(
        datafile=datafile,
        url=write_uploaded_file_to_dataset(dataset, tempfile),
        location=Location.get_default_location(),
    )
    # Content was just written, so empty checksums are acceptable here.
    replica.verify(allowEmptyChecksums=True)
    datafile.save()
    # Re-attach so the replica references the saved datafile's pk.
    replica.datafile = datafile
    replica.save()
    return datafile
开发者ID:crawley,项目名称:mytardis,代码行数:42,代码来源:test_iiif.py
示例15: fpupload
def fpupload(request, dataset_id):
    """
    Uploads all files picked by filepicker to the dataset.

    :param request: a HTTP Request instance
    :type request: :class:`django.http.HttpRequest`
    :param dataset_id: the dataset_id
    :type dataset_id: integer
    :returns: boolean true if successful
    :rtype: bool
    """
    dataset = Dataset.objects.get(id=dataset_id)
    logger.debug('called fpupload')
    if request.method == 'POST':
        logger.debug('got POST')
        for key, val in request.POST.items():
            # Each POST value may carry several comma-separated URLs.
            splits = val.split(",")
            for url in splits:
                try:
                    fp = FilepickerFile(url)
                except ValueError:
                    # Best-effort: skip values that are not valid
                    # filepicker URLs. TODO confirm this is intended.
                    pass
                else:
                    picked_file = fp.get_file()
                    filepath = write_uploaded_file_to_dataset(dataset,
                                                              picked_file)
                    datafile = Dataset_File(dataset=dataset,
                                            filename=picked_file.name,
                                            size=picked_file.size)
                    replica = Replica(datafile=datafile,
                                      url=filepath,
                                      protocol='',
                                      location=Location.get_default_location())
                    # Content was just written; empty checksums are OK.
                    replica.verify(allowEmptyChecksums=True)
                    datafile.save()
                    # Point the replica at the saved datafile's pk.
                    replica.datafile = datafile
                    replica.save()
        return HttpResponse(json.dumps({"result": True}))
开发者ID:TheGoodRob,项目名称:mytardis,代码行数:41,代码来源:views.py
示例16: create_datafile
def create_datafile(file_path):
    """Register a fixture file as a datafile of the enclosing dataset,
    with a verified replica in a dedicated test location; return the
    re-fetched datafile."""
    fixture = path.join(path.dirname(__file__), 'fixtures', file_path)
    size, sha512sum = get_size_and_sha512sum(fixture)
    df = Dataset_File(dataset=dataset,
                      filename=path.basename(fixture),
                      size=size,
                      sha512sum=sha512sum)
    df.save()
    base_url = 'file://' + path.abspath(path.dirname(fixture))
    loc = Location.load_location({
        'name': 'test-flexstation', 'url': base_url, 'type': 'external',
        'priority': 10, 'transfer_provider': 'local'})
    rep = Replica(datafile=df,
                  url='file://' + path.abspath(fixture),
                  protocol='file',
                  location=loc)
    rep.verify()
    rep.save()
    return Dataset_File.objects.get(pk=df.pk)
开发者ID:guillaumeprevost,项目名称:hiri-tardis-filter,代码行数:21,代码来源:test_flexstation.py
示例17: process_enclosure
def process_enclosure(self, dataset, enclosure):
    """Create a datafile (and a replica, then a local copy) for an atom
    feed enclosure, copying across whatever optional metadata the
    enclosure provides."""
    # Prefer the enclosure's title; fall back to the URL's basename.
    filename = getattr(enclosure, 'title', basename(enclosure.href))
    datafile = Dataset_File(filename=filename, dataset=dataset)
    try:
        datafile.mimetype = enclosure.mime
    except AttributeError:
        pass  # mime type is optional on the enclosure
    try:
        datafile.size = enclosure.length
    except AttributeError:
        pass  # size is optional on the enclosure
    try:
        hash = enclosure.hash
        # Split on white space, then ':' to get tuples to feed into dict
        hashdict = dict([s.partition(':')[::2] for s in hash.split()])
        # Set SHA-512 sum
        datafile.sha512sum = hashdict['sha-512']
    except AttributeError:
        pass  # checksum is optional on the enclosure
    datafile.save()
    url = enclosure.href
    # This means we will allow the atom feed to feed us any enclosure
    # URL that matches a registered location. Maybe we should restrict
    # this to a specific location.
    location = Location.get_location_for_url(url)
    if not location:
        logger.error('Rejected ingestion for unknown location %s' % url)
        return
    replica = Replica(datafile=datafile, url=url,
                      location=location)
    # Protocol is whatever scheme the enclosure URL uses.
    replica.protocol = enclosure.href.partition('://')[0]
    replica.save()
    self.make_local_copy(replica)
开发者ID:crawley,项目名称:mytardis-app-atom,代码行数:34,代码来源:atom_ingest.py
示例18: test_hrmc_filter
def test_hrmc_filter(self):
    """
    Make an experiment, load up the grexp file and check the dataset
    schema is missing, then load up grfinal and check the dataset
    schema is created.
    """
    user = _create_test_user()
    license = _create_license()
    exp = _create_test_experiment(user, license)
    ds = Dataset(description='happy snaps of plumage')
    ds.save()
    _create_test_dataset(ds, exp.id,
                         {"output.dat": 'hello', "grexp.dat": '2 5\n6 15\n'})
    ds.experiments.add(exp)
    ds.save()
    sch = Schema(namespace=self.HRMCSCHEMA,
                 name="hrmc_views", type=Schema.DATASET)
    sch.save()
    param = ParameterName(schema=sch, name="plot",
                          full_name="scatterplot", units="image",
                          data_type=ParameterName.FILENAME
                          )
    param.save()
    # No grfinal file yet, so the filter has produced no parameter sets.
    param_sets = get_param_sets(ds)
    self.assertEquals(list(param_sets), [])
    _create_test_dataset(ds, exp.id, {'grfinal21.dat': "1 3\n5 14\n"})
    df2 = Dataset_File(dataset=ds, url='path/grfinal21.dat')
    df2.save()
    # Invoke the HRMC filter as if the post-save signal had fired.
    h = hrmc.HRMCOutput('HRMC', self.HRMCSCHEMA)
    h(sender=Dataset_File, instance=df2)
    param_sets = get_param_sets(ds)
    self.assertEquals([x.schema.namespace for x in param_sets],
                      [self.HRMCSCHEMA])
开发者ID:bioscience-data-platform,项目名称:mytardis-app-hrmcoutput,代码行数:40,代码来源:test_view.py
示例19: testLocalFile
def testLocalFile(self):
    """A freshly stored local replica should be re-verified by
    verify_files() after its 'verified' flag is reset."""
    content = urandom(1024)
    cf = ContentFile(content, 'background_task_testfile')
    # Create new Datafile
    datafile = Dataset_File(dataset=self.dataset)
    datafile.filename = cf.name
    datafile.size = len(content)
    datafile.sha512sum = hashlib.sha512(content).hexdigest()
    datafile.save()
    replica = Replica(datafile=datafile,
                      url=write_uploaded_file_to_dataset(self.dataset, cf),
                      location=Location.get_default_location())
    replica.save()

    def get_replica(datafile):
        # Re-fetch from the DB to observe verify_files()'s changes.
        return Replica.objects.get(datafile=datafile)

    # undo auto-verify:
    replica.verified = False
    replica.save(update_fields=['verified'])
    # Check that it's not currently verified
    expect(get_replica(datafile).verified).to_be(False)
    # Check it verifies
    verify_files()
    expect(get_replica(datafile).verified).to_be(True)
开发者ID:TheGoodRob,项目名称:mytardis,代码行数:28,代码来源:test_tasks.py
示例20: _build_datafile
def _build_datafile(self, testfile, filename, dataset, url,
                    protocol='', checksum=None, size=None, mimetype=''):
    """
    Create a Dataset_File plus a verified Replica for *testfile*.

    :param testfile: path of the real file to measure and checksum
    :param filename: name recorded on the datafile
    :param dataset: owning Dataset
    :param url: replica URL; a scheme-less URL maps to the 'local'
        location, otherwise a matching or newly loaded external
        location is used
    :param protocol: replica protocol
    :param checksum: overrides the computed sha512sum when given
    :param size: overrides the measured file size when given
    :param mimetype: MIME type recorded on the datafile
    :returns: the freshly re-fetched Dataset_File
    """
    filesize, sha512sum = get_size_and_sha512sum(testfile)
    datafile = Dataset_File(dataset=dataset, filename=filename,
                            mimetype=mimetype,
                            # 'is not None' so an explicit size of 0 is
                            # honoured rather than falling back.
                            size=str(size if size is not None else filesize),
                            sha512sum=(checksum if checksum else sha512sum))
    datafile.save()
    if urlparse.urlparse(url).scheme == '':
        location = Location.get_location('local')
    else:
        location = Location.get_location_for_url(url)
    if not location:
        # No registered location matches: register one rooted at the
        # URL's directory.
        location = Location.load_location({
            'name': filename, 'url': urlparse.urljoin(url, '.'),
            'type': 'external',
            'priority': 10, 'transfer_provider': 'local'})
    replica = Replica(datafile=datafile, protocol=protocol, url=url,
                      location=location)
    replica.verify()
    replica.save()
    return Dataset_File.objects.get(pk=datafile.pk)
开发者ID:crawley,项目名称:mytardis,代码行数:22,代码来源:test_download.py
注:本文中的tardis.tardis_portal.models.Dataset_File类示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。 |
请发表评论