本文整理汇总了Python中tests.integration.aws函数的典型用法代码示例。如果您正苦于以下问题:Python aws函数的具体用法?Python aws怎么用?Python aws使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了aws函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: tearDown
def tearDown(self):
    """Remove scratch files and force-delete both integration buckets."""
    if os.path.exists(self.filename1):
        os.remove(self.filename1)
    # --force deletes the bucket even if it still contains objects.
    for bucket in (self.bucket_name, self.bucket_name2):
        aws('s3 rb --force s3://%s' % bucket)
    if os.path.exists(self.filename2):
        os.remove(self.filename2)
开发者ID:2mind,项目名称:aws-cli,代码行数:7,代码来源:test_plugin.py
示例2: test_cp_to_and_from_s3
def test_cp_to_and_from_s3(self):
    """Round-trip a single file: upload it to s3, then download it back."""
    bucket = self.create_bucket()
    # Upload a small local file into the bucket.
    src = self.files.create_file('foo.txt', 'this is foo.txt')
    upload = aws('s3 cp %s s3://%s/foo.txt' % (src, bucket))
    self.assert_no_errors(upload)
    # The object must now exist with the expected body and content type.
    self.assertTrue(self.key_exists(bucket, key_name='foo.txt'))
    self.assertEqual(
        self.get_key_contents(bucket, key_name='foo.txt'),
        'this is foo.txt')
    self.assertEqual(
        self.content_type_for_key(bucket, key_name='foo.txt'),
        'text/plain')
    # Download under a new local name and check the contents survived.
    local_copy = self.files.full_path('bar.txt')
    download = aws('s3 cp s3://%s/foo.txt %s' % (bucket, local_copy))
    self.assert_no_errors(download)
    with open(local_copy, 'r') as fh:
        self.assertEqual(fh.read(), 'this is foo.txt')
开发者ID:auvik,项目名称:aws-cli,代码行数:28,代码来源:test_plugin.py
示例3: test_exclude_filter_with_delete
def test_exclude_filter_with_delete(self):
    """Regression test for https://github.com/aws/aws-cli/issues/778."""
    bucket = self.create_bucket()
    self.files.create_file('foo.txt', 'contents')
    local_py = self.files.create_file('bar.py', 'contents')
    sync = aws("s3 sync %s s3://%s/" % (self.files.rootdir, bucket))
    self.assert_no_errors(sync)
    self.assertTrue(self.key_exists(bucket, key_name='bar.py'))
    # Remove only the local copy, recreating the state from the bug report:
    #   local: foo.txt            remote: foo.txt, bar.py
    os.remove(local_py)
    # Running sync with --exclude '*.py' --delete must NOT delete the
    # excluded bar.py object on the remote side.
    resync = aws("s3 sync %s s3://%s/ --exclude '*.py' --delete" % (
        self.files.rootdir, bucket))
    self.assert_no_errors(resync)
    self.assertTrue(
        self.key_exists(bucket, key_name='bar.py'),
        ("The --delete flag was not applied to the receiving "
         "end, the 'bar.py' file was deleted even though it was excluded."))
开发者ID:felixcheruiyot,项目名称:aws-cli,代码行数:25,代码来源:test_plugin.py
示例4: test_sync_with_delete_option_with_same_prefix
def test_sync_with_delete_option_with_same_prefix(self):
    """Regression test for https://github.com/aws/aws-cli/issues/440.

    A directory whose name is a prefix of sibling file names must not
    confuse sync's comparison logic into re-downloading or deleting.
    """
    # Layout created under rootdir:
    #   test/foo.txt
    #   test-123.txt
    #   test-321.txt
    #   test.txt
    bucket_name = self.create_bucket()
    nested_dir = os.path.join(self.files.rootdir, 'test')
    os.mkdir(nested_dir)
    self.files.create_file(os.path.join(nested_dir, 'foo.txt'),
                           contents='foo.txt contents')
    self.files.create_file('test-123.txt', 'test-123.txt contents')
    self.files.create_file('test-321.txt', 'test-321.txt contents')
    self.files.create_file('test.txt', 'test.txt contents')
    # Push everything up to s3.
    p = aws('s3 sync %s s3://%s/' % (self.files.rootdir, bucket_name))
    # Bug fix: the upload's result was previously discarded; if the
    # upload failed, the "no output" dry-run checks below would have
    # passed vacuously.
    self.assert_no_errors(p)
    # Syncing back down with --dryrun should be a complete no-op:
    # nothing differs, so no download:/delete: lines may appear.
    p = aws('s3 sync s3://%s/ %s --dryrun' % (
        bucket_name, self.files.rootdir))
    # These targeted assertions give better failure messages than only
    # checking that stdout is empty.
    self.assertNotIn('download:', p.stdout)
    self.assertNotIn('delete:', p.stdout)
    self.assertEqual('', p.stdout)
开发者ID:BillTheBest,项目名称:aws-cli,代码行数:34,代码来源:test_plugin.py
示例5: test_set_with_empty_config_file
def test_set_with_empty_config_file(self):
    """`configure set` on an empty config file writes a fresh section."""
    # Truncate the config file to zero bytes.
    with open(self.config_filename, 'w'):
        pass
    aws('configure set region us-west-1', env_vars=self.env_vars)
    expected = (
        '[default]\n'
        'region = us-west-1\n')
    self.assertEqual(expected, self.get_config_file_contents())
开发者ID:BillTheBest,项目名称:aws-cli,代码行数:8,代码来源:test_configure.py
示例6: test_mb_rb
def test_mb_rb(self):
    """A bucket can be created with mb and removed again with rb."""
    result = aws('s3 mb s3://%s' % self.bucket_name)
    self.assert_no_errors(result)
    # The new bucket must appear in the account's bucket listing.
    names = [bucket['Name'] for bucket in self.list_buckets()]
    self.assertIn(self.bucket_name, names)
    result = aws('s3 rb s3://%s' % self.bucket_name)
    self.assert_no_errors(result)
开发者ID:auvik,项目名称:aws-cli,代码行数:9,代码来源:test_plugin.py
示例7: test_set_with_updating_value
def test_set_with_updating_value(self):
    """`configure set` overwrites an existing value in place."""
    self.set_config_file_contents(
        '[default]\n'
        'region = us-west-2\n')
    aws('configure set region us-west-1', env_vars=self.env_vars)
    expected = (
        '[default]\n'
        'region = us-west-1\n')
    self.assertEqual(expected, self.get_config_file_contents())
开发者ID:BillTheBest,项目名称:aws-cli,代码行数:9,代码来源:test_configure.py
示例8: test_set_with_commented_out_field
def test_set_with_commented_out_field(self):
    """Commented-out sections are ignored; a real section is appended."""
    self.set_config_file_contents(
        '#[preview]\n'
        ';cloudsearch = true\n')
    aws('configure set preview.cloudsearch true', env_vars=self.env_vars)
    # The commented lines are preserved and an uncommented section added.
    expected = (
        '#[preview]\n'
        ';cloudsearch = true\n'
        '[preview]\n'
        'cloudsearch = true\n')
    self.assertEqual(expected, self.get_config_file_contents())
开发者ID:BillTheBest,项目名称:aws-cli,代码行数:10,代码来源:test_configure.py
示例9: test_mv_local_to_s3
def test_mv_local_to_s3(self):
    """mv of a local file uploads it to s3 and deletes the local copy."""
    bucket_name = self.create_bucket()
    full_path = self.files.create_file('foo.txt', 'this is foo.txt')
    p = aws('s3 mv %s s3://%s/foo.txt' % (full_path,
                                          bucket_name))
    # Bug fix: the command result was previously discarded; a failed mv
    # only surfaced as a confusing downstream assertion failure.
    self.assert_no_errors(p)
    # mv removes the source file...
    self.assertFalse(os.path.exists(full_path))
    # ...and the contents now live in s3.
    contents = self.get_key_contents(bucket_name, 'foo.txt')
    self.assertEqual(contents, 'this is foo.txt')
开发者ID:BillTheBest,项目名称:aws-cli,代码行数:10,代码来源:test_plugin.py
示例10: test_basic_exclude_filter_for_single_file
def test_basic_exclude_filter_for_single_file(self):
    """--exclude '*' suppresses the upload of a single file."""
    full_path = self.files.create_file('foo.txt', 'this is foo.txt')
    # Without a filter, the dry run reports the file as an upload.
    unfiltered = aws('s3 cp %s s3://random-bucket-name/ --dryrun' % full_path)
    self.assert_no_errors(unfiltered)
    self.assertIn('(dryrun) upload:', unfiltered.stdout)
    # Excluding everything must leave nothing to upload.
    filtered = aws("s3 cp %s s3://random-bucket-name/ --dryrun --exclude '*'"
                   % full_path)
    self.assert_no_files_would_be_uploaded(filtered)
开发者ID:auvik,项目名称:aws-cli,代码行数:10,代码来源:test_plugin.py
示例11: test_mv_s3_to_s3
def test_mv_s3_to_s3(self):
    """s3-to-s3 mv copies the object and removes it from the source."""
    from_bucket = self.create_bucket()
    to_bucket = self.create_bucket()
    self.put_object(from_bucket, 'foo.txt', 'this is foo.txt')
    p = aws('s3 mv s3://%s/foo.txt s3://%s/foo.txt' % (from_bucket, to_bucket))
    # Bug fix: verify the mv itself succeeded instead of silently
    # discarding its result.
    self.assert_no_errors(p)
    contents = self.get_key_contents(to_bucket, 'foo.txt')
    self.assertEqual(contents, 'this is foo.txt')
    # After a move the source object must be gone.
    self.assertFalse(self.key_exists(from_bucket, key_name='foo.txt'))
开发者ID:BillTheBest,项目名称:aws-cli,代码行数:10,代码来源:test_plugin.py
示例12: test_json_param_parsing
def test_json_param_parsing(self):
    """Sanity check that JSON services yield parseable JSON output.

    This is covered by unit tests in botocore; here we only verify the
    end-to-end CLI path for two JSON-protocol services.
    """
    for command in ('swf list-domains --registration-status REGISTERED',
                    'dynamodb list-tables'):
        p = aws(command)
        self.assertEqual(p.rc, 0)
        self.assertIsInstance(p.json, dict)
开发者ID:2mind,项目名称:aws-cli,代码行数:10,代码来源:test_cli.py
示例13: test_cp_s3_s3_multipart
def test_cp_s3_s3_multipart(self):
    """s3-to-s3 cp of a large object exercises the multipart copy path."""
    from_bucket = self.create_bucket()
    to_bucket = self.create_bucket()
    # 40 MB body — large enough to trigger a multipart transfer.
    file_contents = 'abcd' * (1024 * 1024 * 10)
    self.put_object(from_bucket, 'foo.txt', file_contents)
    p = aws('s3 cp s3://%s/foo.txt s3://%s/foo.txt' % (from_bucket, to_bucket))
    # Bug fix: check the copy succeeded rather than discarding its result.
    self.assert_no_errors(p)
    contents = self.get_key_contents(to_bucket, 'foo.txt')
    self.assertEqual(contents, file_contents)
    # cp (unlike mv) must leave the source object in place.
    self.assertTrue(self.key_exists(from_bucket, key_name='foo.txt'))
开发者ID:BillTheBest,项目名称:aws-cli,代码行数:10,代码来源:test_plugin.py
示例14: test_mv_s3_to_s3_multipart
def test_mv_s3_to_s3_multipart(self):
    """s3-to-s3 mv of a large object exercises the multipart copy path."""
    from_bucket = self.create_bucket()
    to_bucket = self.create_bucket()
    # 40 MB body — large enough to trigger a multipart transfer.
    file_contents = 'abcd' * (1024 * 1024 * 10)
    self.put_object(from_bucket, 'foo.txt', file_contents)
    p = aws('s3 mv s3://%s/foo.txt s3://%s/foo.txt' % (from_bucket, to_bucket))
    # Bug fix: check the move succeeded rather than discarding its result.
    self.assert_no_errors(p)
    contents = self.get_key_contents(to_bucket, 'foo.txt')
    self.assertEqual(contents, file_contents)
    # After a move the source object must be gone.
    self.assertFalse(self.key_exists(from_bucket, key_name='foo.txt'))
开发者ID:BillTheBest,项目名称:aws-cli,代码行数:11,代码来源:test_plugin.py
示例15: test_mv_s3_to_local
def test_mv_s3_to_local(self):
    """mv of an s3 object downloads it and deletes the remote key."""
    bucket_name = self.create_bucket()
    self.put_object(bucket_name, 'foo.txt', 'this is foo.txt')
    full_path = self.files.full_path('foo.txt')
    self.assertTrue(self.key_exists(bucket_name, key_name='foo.txt'))
    p = aws('s3 mv s3://%s/foo.txt %s' % (bucket_name, full_path))
    # Bug fix: verify the mv command itself succeeded instead of
    # discarding its result.
    self.assert_no_errors(p)
    self.assertTrue(os.path.exists(full_path))
    with open(full_path, 'r') as f:
        self.assertEqual(f.read(), 'this is foo.txt')
    # mv deletes the source, so the s3 object must be gone.
    self.assertFalse(self.key_exists(bucket_name, key_name='foo.txt'))
开发者ID:BillTheBest,项目名称:aws-cli,代码行数:11,代码来源:test_plugin.py
示例16: test_upload_download_file_with_spaces
def test_upload_download_file_with_spaces(self):
    """Filenames containing spaces survive a recursive cp round trip."""
    bucket = self.create_bucket()
    local_file = self.files.create_file('with space.txt', 'contents')
    upload = aws('s3 cp %s s3://%s/ --recursive' % (self.files.rootdir,
                                                    bucket))
    self.assert_no_errors(upload)
    # Drop the local copy, then pull everything back down.
    os.remove(local_file)
    download = aws('s3 cp s3://%s/ %s --recursive' % (bucket,
                                                      self.files.rootdir))
    self.assert_no_errors(download)
    self.assertEqual(os.listdir(self.files.rootdir)[0], 'with space.txt')
开发者ID:bhalothia,项目名称:aws-cli,代码行数:12,代码来源:test_plugin.py
示例17: test_recur_cp
def test_recur_cp(self):
    """Recursive cp round-trips a directory through s3."""
    upload = aws('s3 cp %s s3://%s --recursive --quiet' % ('some_dir',
                                                           self.bucket_name))
    self.assertEqual(upload.rc, 0)
    download = aws('s3 cp s3://%s %s --recursive --quiet' % (self.bucket_name,
                                                             'some_dir'))
    self.assertEqual(download.rc, 0)
    # The downloaded file must match the original contents.
    with open(self.path1, 'rb') as downloaded:
        self.assertEqual(downloaded.read(), b'This is a test.')
开发者ID:2mind,项目名称:aws-cli,代码行数:12,代码来源:test_plugin.py
示例18: test_cp
def test_cp(self):
    """cp uploads a single file and downloads it under a second name."""
    # NOTE(review): plain '+' concatenation (not os.path.join) is kept on
    # purpose — the s3_path concatenation below suggests self.filename1
    # embeds its own separator; confirm against the fixture's setUp.
    local_src = 'some_dir' + os.sep + self.filename1
    local_dst = 'some_dir' + os.sep + self.filename2
    upload = aws('s3 cp %s s3://%s --quiet' % (local_src, self.bucket_name))
    self.assertEqual(upload.rc, 0)
    s3_path = self.bucket_name + self.filename1
    download = aws('s3 cp s3://%s %s --quiet' % (s3_path, local_dst))
    self.assertEqual(download.rc, 0)
    # The round-tripped bytes must match the original file.
    with open(self.path2, 'rb') as fh:
        self.assertEqual(fh.read(), b'This is a test.')
开发者ID:2mind,项目名称:aws-cli,代码行数:13,代码来源:test_plugin.py
示例19: test_fail_mb_rb
def test_fail_mb_rb(self):
    """mb and rb fail cleanly on a bucket name owned by someone else.

    'mybucket' already exists globally, so this account can neither
    create it nor delete it.
    """
    bucket_name = "mybucket"
    result = aws('s3 mb s3://%s' % bucket_name)
    self.assertIn("BucketAlreadyExists", result.stdout)
    result = aws('s3 rb s3://%s' % bucket_name)
    self.assertIn("AccessDenied", result.stdout)
开发者ID:2mind,项目名称:aws-cli,代码行数:13,代码来源:test_plugin.py
示例20: test_sync_file_with_spaces
def test_sync_file_with_spaces(self):
    """Syncing a file whose name contains spaces converges in one pass."""
    # Bug fix: create_bucket() was accidentally called twice; the first
    # bucket was never used and leaked past the test.
    bucket_name = self.create_bucket()
    self.files.create_file('with space.txt', 'contents')
    p = aws('s3 sync %s s3://%s/' % (self.files.rootdir,
                                     bucket_name))
    self.assert_no_errors(p)
    # A second sync must be a complete no-op: nothing on stdout/stderr
    # and a zero return code.
    p2 = aws('s3 sync %s s3://%s/' % (self.files.rootdir,
                                      bucket_name))
    self.assertEqual(p2.stdout, '')
    self.assertEqual(p2.stderr, '')
    self.assertEqual(p2.rc, 0)
开发者ID:bhalothia,项目名称:aws-cli,代码行数:14,代码来源:test_plugin.py
注:本文中的tests.integration.aws函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论