
Python connection.get_collection Function Code Examples


This article collects typical usage examples of the Python function pulp.server.db.connection.get_collection. If you have been wondering what get_collection does, how to call it, or what real-world usage looks like, the curated code examples below should help.



The sections below present 20 code examples of the get_collection function, ordered by popularity.
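
As a quick orientation before the examples: the common pattern across them is to import connection (or get_collection directly) from pulp.server.db and request a MongoDB collection by name, then use ordinary pymongo calls such as find(), update(), save() and remove() on it. The sketch below is a minimal illustration assembled from the calls seen in the examples; the collection name 'repos' and the query are illustrative only, and it assumes the Pulp server has already initialized its database connection.

# Minimal usage sketch (assumes the Pulp server has already initialized
# pulp.server.db.connection; the collection name and query are illustrative).
from pulp.server.db import connection

repo_collection = connection.get_collection('repos')
repo_ids = [r['id'] for r in repo_collection.find({}, fields=['id'])]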

Example 1: migrate

def migrate(*args, **kwargs):
    """
    For each repository with a yum distributor, clean up the old yum distributor's
    mess and re-publish the repository with the new distributor.
    """

    distributor_collection = get_collection('repo_distributors')
    yum_distributors = list(distributor_collection.find({'distributor_type_id': YUM_DISTRIBUTOR_ID}))

    repo_collection = get_collection('repos')
    repo_ids = list(set(d['repo_id'] for d in yum_distributors))
    repos = dict((r['id'], r) for r in repo_collection.find({'id': {'$in': repo_ids}}))

    for d in yum_distributors:
        repo = repos[d['repo_id']]
        config = d['config'] or {}

        if d['last_publish'] is None:
            continue

        _clear_working_dir(repo)
        _clear_old_publish_dirs(repo, config)
        _re_publish_repository(repo, d)

    _remove_legacy_publish_dirs()
Author: pombredanne | Project: rcm-pulp-rpm | Lines: 25 | Source: 0015_new_yum_distributor.py


Example 2: _create_or_update_type

def _create_or_update_type(type_def):
    """
    This method creates or updates a type definition in MongoDB.

    :param type_def: the type definition to update or create. If a type definition with the same
                     id as an existing type already exists, the type is updated; otherwise it is
                     created.
    :type  type_def: ContentType

    :return: This method will always return None
    :rtype:  None
    """
    # Make sure a collection exists for the type
    database = pulp_db.get_database()
    collection_name = unit_collection_name(type_def.id)

    if collection_name not in database.collection_names():
        pulp_db.get_collection(collection_name, create=True)

    # Add or update an entry in the types list
    content_type_collection = ContentType.get_collection()
    content_type = ContentType(
        type_def.id, type_def.display_name, type_def.description, type_def.unit_key,
        type_def.search_indexes, type_def.referenced_types)
    # no longer rely on _id = id
    existing_type = content_type_collection.find_one({'id': type_def.id}, fields=[])
    if existing_type is not None:
        content_type._id = existing_type['_id']
    # XXX this still causes a potential race condition when 2 users are updating the same type
    content_type_collection.save(content_type, safe=True)
Author: hgschmie | Project: pulp | Lines: 29 | Source: database.py


Example 3: migrate

def migrate(*args, **kwargs):
    schedule_collection = connection.get_collection('scheduled_calls')
    importer_collection = connection.get_collection('repo_importers')
    distributor_collection = connection.get_collection('repo_distributors')

    map(functools.partial(convert_schedule, schedule_collection.save), schedule_collection.find())
    move_scheduled_syncs(importer_collection, schedule_collection)
    move_scheduled_publishes(distributor_collection, schedule_collection)
Author: BrnoPCmaniak | Project: pulp | Lines: 8 | Source: 0007_scheduled_task_conversion.py
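
As a side note, the map(functools.partial(...)) line above is equivalent to the explicit loop sketched below, which may be easier to read; convert_schedule and schedule_collection come from the migration module shown above.

# Equivalent, more explicit form of the map(functools.partial(...)) call.
for call in schedule_collection.find():
    convert_schedule(schedule_collection.save, call)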


Example 4: repositories_with_yum_importers

def repositories_with_yum_importers():
    repo_importer_collection = get_collection('repo_importers')
    repo_yum_importers = repo_importer_collection.find({'importer_type_id': _TYPE_YUM_IMPORTER},
                                                       fields=['repo_id'])
    yum_repo_ids = [i['repo_id'] for i in repo_yum_importers]
    repo_collection = get_collection('repos')
    yum_repos = repo_collection.find({'repo_id': {'$in': yum_repo_ids}},
                                     fields=['repo_id', 'scratchpad'])
    return list(yum_repos)
Author: FlorianHeigl | Project: pulp_rpm | Lines: 9 | Source: 0007_inventoried_custom_metadata.py


Example 5: tearDown

    def tearDown(self):
        super(Migration0004Tests, self).tearDown()

        # Delete any sample data added for the test
        types_db.clean()

        RepoContentUnit.get_collection().remove()
        get_collection('repo_importers').remove()
        model.Repository.drop_collection()
Author: FlorianHeigl | Project: pulp_rpm | Lines: 9 | Source: test_0004_migrate.py


Example 6: _migrate_rpmlike_units

def _migrate_rpmlike_units(unit_type):
    """
    This function performs the migration on RPMs, DRPMs, and SRPMs. These all have the same schema
    when it comes to checksumtype, so they can be treated the same way.

    :param unit_type:          The unit_type_id, as found in pulp_rpm.common.ids.
    :type  unit_type:          basestring
    """
    repos = connection.get_collection('repos')
    repo_content_units = connection.get_collection('repo_content_units')
    unit_collection = connection.get_collection('units_%s' % unit_type)

    for unit in unit_collection.find():
        try:
            sanitized_type = verification.sanitize_checksum_type(unit['checksumtype'])
            if sanitized_type != unit['checksumtype']:
                # Let's see if we can get away with changing its checksumtype to the sanitized
                # value. If this works, we won't have to do anything else.
                unit_collection.update({'_id': unit['_id']},
                                       {'$set': {'checksumtype': sanitized_type}})
        except errors.DuplicateKeyError:
            # Looks like there is already an identical unit with the sanitized checksum type. This
            # means we need to remove the current unit, but first we will need to change any
            # references to this unit to point to the other.
            conflicting_unit = unit_collection.find_one(
                {'name': unit['name'], 'epoch': unit['epoch'], 'version': unit['version'],
                 'release': unit['release'], 'arch': unit['arch'], 'checksum': unit['checksum'],
                 'checksumtype': sanitized_type})
            for rcu in repo_content_units.find({'unit_type_id': unit_type, 'unit_id': unit['_id']}):
                # Now we must either switch the rcu from pointing to unit to pointing to
                # conflicting_unit, or delete the rcu if there is already one in the same repo.
                try:
                    msg = _('Updating %(repo_id)s to contain %(type)s %(conflicting)s instead of '
                            '%(old_id)s.')
                    msg = msg % {'repo_id': rcu['repo_id'], 'type': unit_type,
                                 'conflicting': conflicting_unit['_id'], 'old_id': unit['_id']}
                    _logger.debug(msg)
                    repo_content_units.update({'_id': rcu['_id']},
                                              {'$set': {'unit_id': conflicting_unit['_id']}})
                except errors.DuplicateKeyError:
                    # We will delete this RepoContentUnit since the sha1 RPM is already in the
                    # repository.
                    msg = _('Removing %(type)s %(old_id)s from repo %(repo_id)s since it conflicts '
                            'with %(conflicting)s.')
                    msg = msg % {'repo_id': rcu['repo_id'], 'type': unit_type,
                                 'conflicting': conflicting_unit['_id'], 'old_id': unit['_id']}
                    _logger.debug(msg)
                    repo_content_units.remove({'_id': rcu['_id']})
                    # In this case, we now need to decrement the repository's "content_unit_counts"
                    # for this unit_type by one, since we removed a unit from a repository.
                    repos.update(
                        {'id': rcu['repo_id']},
                        {'$inc': {'content_unit_counts.%s' % unit_type: -1}})
            # Now that we have removed or altered all references to the "sha" Unit, we need to
            # remove it since it is a duplicate.
            unit_collection.remove({'_id': unit['_id']})
Author: AndreaGiardini | Project: pulp_rpm | Lines: 56 | Source: 0017_merge_sha_sha1.py
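
A brief usage note: the docstring says this helper covers RPMs, DRPMs, and SRPMs, so the migration's migrate() entry point presumably loops over those three unit types. The sketch below shows such a driver under the assumption that the unit type ids are the literal strings 'rpm', 'drpm', and 'srpm' (in the real module they would come from pulp_rpm.common.ids).

def migrate(*args, **kwargs):
    # Hypothetical driver for _migrate_rpmlike_units(); the literal type ids
    # are assumed rather than imported from pulp_rpm.common.ids.
    for unit_type in ('rpm', 'drpm', 'srpm'):
        _migrate_rpmlike_units(unit_type)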


Example 7: create_user

    def create_user(login, password=None, name=None, roles=None):
        """
        Creates a new Pulp user and adds it to specified to roles.

        @param login: login name / unique identifier for the user
        @type  login: str

        @param password: password for login credentials
        @type  password: str

        @param name: user's full name
        @type  name: str

        @param roles: list of roles user will belong to
        @type  roles: list

        @raise DuplicateResource: if there is already a user with the requested login
        @raise InvalidValue: if any of the fields are unacceptable
        """

        existing_user = get_collection('users').find_one({'login': login})
        if existing_user is not None:
            raise DuplicateResource(login)

        invalid_values = []

        if login is None or _USER_LOGIN_REGEX.match(login) is None:
            invalid_values.append('login')
        if invalid_type(name, basestring):
            invalid_values.append('name')
        if invalid_type(roles, list):
            invalid_values.append('roles')

        if invalid_values:
            raise InvalidValue(invalid_values)

        # Use the login for name of the user if one was not specified
        name = name or login
        roles = roles or None

        # Creation
        create_me = model.User(login=login, name=name, roles=roles)
        create_me.set_password(password)
        create_me.save()

        # Grant permissions
        permission_manager = factory.permission_manager()
        permission_manager.grant_automatic_permissions_for_user(create_me.login)

        # Retrieve the user to return the SON object
        created = get_collection('users').find_one({'login': login})
        created.pop('password')

        return created
Author: maxamillion | Project: pulp | Lines: 54 | Source: managers.py
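
For orientation, a call against the signature above might look like the sketch below. In Pulp this function lives on a user manager obtained through the managers factory, so the bare call is shorthand, and the login, password, name, and role value are all invented for illustration.

# Illustrative call only; every argument value here is made up, and
# 'super-users' is merely an assumed role name.
created = create_user(login='jdoe',
                      password='s3cret',
                      name='Jane Doe',
                      roles=['super-users'])
# 'created' is the user document read back from the 'users' collection,
# with the 'password' field already popped by create_user().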


Example 8: _remove_prestodelta_repo_units

def _remove_prestodelta_repo_units():
    """
    Remove all prestodelta repo_content_units since they should not have been created
    to begin with.
    """
    metadata_collection = get_collection('units_yum_repo_metadata_file')
    repo_units_collection = get_collection('repo_content_units')
    for presto_unit in metadata_collection.find({'data_type': 'prestodelta'}):
        # remove any repo repo units that reference it, the unit itself will
        # be removed by the orphan cleanup at some point in the future
        repo_units_collection.remove({'unit_id': presto_unit['_id']})
Author: BrnoPCmaniak | Project: pulp_rpm | Lines: 11 | Source: 0020_nested_drpm_directory.py


Example 9: migrate

def migrate(*args, **kwargs):
    """
    For each repository with a yum distributor, clean up the old yum distributor's
    mess and re-publish the repository with the new distributor.
    """
    if not api._is_initialized():
        api.initialize()

    distributor_collection = get_collection('repo_distributors')
    yum_distributors = list(
        distributor_collection.find({'distributor_type_id': YUM_DISTRIBUTOR_ID}))

    repo_ids = list(set(d['repo_id'] for d in yum_distributors))
    repo_objs = model.Repository.objects(repo_id__in=repo_ids)
    repos = dict((repo_obj.repo_id, repo_obj.to_transfer_repo()) for repo_obj in repo_objs)

    for d in yum_distributors:
        repo = repos[d['repo_id']]
        config = d['config'] or {}

        if d.get('last_publish') is None:
            continue

        _clear_working_dir(repo)
        _clear_old_publish_dirs(repo, config)
        _re_publish_repository(repo, d)

    _remove_legacy_publish_dirs()
Author: ATIX-AG | Project: pulp_rpm | Lines: 28 | Source: 0016_new_yum_distributor.py


Example 10: __init__

    def __init__(self):
        """
        Call super with collection and fields.
        """
        key_fields = ("data_type", "repo_id")
        collection = connection.get_collection("units_yum_repo_metadata_file")
        super(YumMetadataFile, self).__init__(collection, key_fields)
Author: seandst | Project: pulp_rpm | Lines: 7 | Source: 0028_standard_storage_path.py


Example 11: migrate

def migrate(*args, **kwargs):
    """
    Add last_updated and last_override_config to the importer collection.
    """
    updated_key = 'last_updated'
    config_key = 'last_override_config'
    collection = get_collection('repo_importers')

    for importer in collection.find():
        modified = False

        if config_key not in importer:
            importer[config_key] = {}
            modified = True

        # If the key doesn't exist, or does exist but has no value, set it based on the
        # last sync time, if possible. Otherwise, set it to now.
        if not importer.get(updated_key, None):
            try:
                importer[updated_key] = isodate.parse_datetime(importer['last_sync'])
            # The attribute doesn't exist, or parsing failed. It's safe to set a newer timestamp.
            except:  # noqa: E722
                importer[updated_key] = datetime.datetime.now(tz=isodate.UTC)
            modified = True

        if modified:
            collection.save(importer)
Author: alexxa | Project: pulp | Lines: 27 | Source: 0027_importer_schema_change.py
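
To make the effect of this migration concrete, the sketch below applies the same logic to a single in-memory importer document. The document's field values are invented; the real migration operates on the 'repo_importers' collection as shown above.

import datetime

import isodate

# Hypothetical importer document (values invented) before the migration:
importer = {'id': 'yum_importer', 'repo_id': 'zoo',
            'last_sync': '2013-04-09T16:57:06-04:00'}

# What the migration adds: an empty last_override_config, plus last_updated
# parsed from last_sync (falling back to "now" when parsing is impossible).
importer.setdefault('last_override_config', {})
try:
    importer['last_updated'] = isodate.parse_datetime(importer['last_sync'])
except (KeyError, isodate.ISO8601Error):
    importer['last_updated'] = datetime.datetime.now(tz=isodate.UTC)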


Example 12: migrate

def migrate(*args, **kwargs):
    """
    Migrate existing yum importers to use the new configuration key names.

    This migration has the consolidation of verify_checksum and verify_size into a single
    config value. For simplicity, the value for verify_checksum is used as the new setting
    and verify_size is discarded.

    The newest flag in the old config was redundant; the num_old_packages serves the
    same purpose. The newest flag is discarded.

    The purge_orphaned flag was a carry over from v1 and has no effect. It's documented in
    the old yum importer but I'm not sure it was actually used. This migration will attempt
    to delete it anyway just in case.
    """

    repo_importers = get_collection('repo_importers')

    rename_query = {'$rename': {
        'config.feed_url'         : 'config.feed',
        'config.ssl_verify'       : 'config.ssl_validation',
        'config.proxy_url'        : 'config.proxy_host',
        'config.proxy_user'       : 'config.proxy_username',
        'config.proxy_pass'       : 'config.proxy_password',
        'config.num_threads'      : 'config.max_downloads',
        'config.verify_checksum'  : 'config.validate', # see comment above
        'config.remove_old'       : 'config.remove_missing',
        'config.num_old_packages' : 'config.retain_old_count',
    }}
    repo_importers.update({'importer_type_id': 'yum_importer'}, rename_query, safe=True, multi=True)

    remove_query = {'$unset' : {'config.newest' : 1,
                                'config.verify_size' : 1,
                                'config.purge_orphaned' : 1}}
    repo_importers.update({'importer_type_id': 'yum_importer'}, remove_query, safe=True, multi=True)
Author: bechtoldt | Project: pulp_rpm | Lines: 35 | Source: 0010_yum_importer_config_keys.py
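
To illustrate what the $rename/$unset pair does to one importer's config, the sketch below replays the same transformation on a plain dict. The field values are invented and only a subset of the renamed keys is shown; the real migration lets MongoDB perform the update in place.

# Hedged illustration of the key renaming on a single config dict.
old_config = {'feed_url': 'http://example.com/repo/', 'num_threads': 4,
              'verify_checksum': True, 'verify_size': True, 'newest': False}

renames = {'feed_url': 'feed', 'num_threads': 'max_downloads',
           'verify_checksum': 'validate'}
dropped = ('verify_size', 'newest', 'purge_orphaned')

new_config = dict((renames.get(key, key), value)
                  for key, value in old_config.items()
                  if key not in dropped)
# new_config == {'feed': 'http://example.com/repo/', 'max_downloads': 4,
#                'validate': True}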


Example 13: migrate

def migrate(*args, **kwargs):
    """
    Migrate existing errata to have the key "from" instead of "from_str"
    """
    errata_collection = get_collection('units_erratum')
    rename_query = {'$rename': {'from_str': 'from'}}
    errata_collection.update({}, rename_query, multi=True)
Author: dkliban | Project: pulp_rpm | Lines: 7 | Source: 0013_errata_from_str.py


Example 14: _migrate_task_status

def _migrate_task_status():
    """
    Find all task_status documents in an incomplete state and set the state to canceled.
    """
    task_status = connection.get_collection('task_status')
    task_status.update({'state': {'$in': CALL_INCOMPLETE_STATES}},
                       {'$set': {'state': CALL_CANCELED_STATE }}, multi=True)
Author: noris-network | Project: pulp | Lines: 7 | Source: 0012_reserved_resources_schema_change.py


Example 15: _update_indexes

def _update_indexes(type_def, unique):

    collection_name = unit_collection_name(type_def.id)
    collection = connection.get_collection(collection_name, create=False)

    if unique:
        index_list = [type_def.unit_key]  # treat the key as a compound key
    else:
        index_list = type_def.search_indexes

    if index_list is None:
        return

    for index in index_list:

        if isinstance(index, (list, tuple)):
            msg = "Ensuring index [%s] (unique: %s) on type definition [%s]"
            msg = msg % (", ".join(index), unique, type_def.id)
            _logger.debug(msg)
            mongo_index = _create_index_keypair(index)
        else:
            msg = "Ensuring index [%s] (unique: %s) on type definition [%s]"
            msg = msg % (index, unique, type_def.id)
            _logger.debug(msg)
            mongo_index = index

        index_name = collection.ensure_index(mongo_index, unique=unique)

        if index_name is not None:
            _logger.debug("Index [%s] created on type definition [%s]" % (index_name, type_def.id))
        else:
            _logger.debug("Index already existed on type definition [%s]" % type_def.id)
Author: maxamillion | Project: pulp | Lines: 32 | Source: database.py


Example 16: test_update_unit_key_multiple_fields

    def test_update_unit_key_multiple_fields(self):
        """
        Tests that a multiple field unit key is built as a single, compound index
        """

        # Setup
        unit_key = ['compound_1', 'compound_2']
        type_def = TypeDefinition('rpm', 'RPM', 'RPM Packages', unit_key, None, [])

        # Test
        types_db._update_unit_key(type_def)

        # Verify
        collection_name = types_db.unit_collection_name(type_def.id)
        collection = pulp_db.get_collection(collection_name)

        index_dict = collection.index_information()

        self.assertEqual(2, len(index_dict)) # default (_id) + unit key

        index = index_dict['compound_1_1_compound_2_1']
        self.assertTrue(index['unique'])

        keys = index['key']
        self.assertEqual(2, len(keys))
        self.assertEqual('compound_1', keys[0][0])
        self.assertEqual(types_db.ASCENDING, keys[0][1])
        self.assertEqual('compound_2', keys[1][0])
        self.assertEqual(types_db.ASCENDING, keys[1][1])
Author: ashcrow | Project: pulp | Lines: 29 | Source: test_types_database.py


Example 17: _migrate_rpm_repositories

def _migrate_rpm_repositories():
    '''
    This migration adds an export_distributor to every old rpm repo that does not already have
    one associated with it. Since iso_distributor was renamed to export_distributor, it also
    removes any iso_distributor associated with an rpm repo.
    '''
    collection = get_collection('repo_distributors')
    for repo_distributor in collection.find():

        # Check only for rpm repos
        if repo_distributor['distributor_type_id'] == ids.TYPE_ID_DISTRIBUTOR_YUM:

            # Check if an export_distributor exists for the same repo
            if collection.find_one({'repo_id': repo_distributor['repo_id'],
                                    'distributor_type_id': ids.TYPE_ID_DISTRIBUTOR_EXPORT}) is None:
                # If not, create a new one with default config
                export_distributor = model.Distributor(
                    repo_id=repo_distributor['repo_id'],
                    distributor_id=ids.EXPORT_DISTRIBUTOR_ID,
                    distributor_type_id=ids.TYPE_ID_DISTRIBUTOR_EXPORT,
                    config=EXPORT_DISTRIBUTOR_CONFIG,
                    auto_publish=False)
                collection.save(export_distributor)

            # Remove iso_distributor associated with the repo
            iso_distributor = collection.find_one(
                {'repo_id': repo_distributor['repo_id'], 'distributor_type_id': 'iso_distributor'})
            if iso_distributor is not None:
                collection.remove(iso_distributor)
Author: dkliban | Project: pulp_rpm | Lines: 29 | Source: 0001_rpm_add_export_distributor.py


Example 18: setUp

    def setUp(self):
        super(self.__class__, self).setUp()
        self.repo_importers = get_collection('repo_importers')

        importers = (
            {"repo_id": "proxy",
             "importer_type_id": "iso_importer", "last_sync": "2013-04-09T16:57:06-04:00",
             "scheduled_syncs": [], "scratchpad": None,
             "config": {
                "proxy_user": "rbarlow",
                "feed_url": "http://pkilambi.fedorapeople.org/test_file_repo/",
                "proxy_url": "localhost", "proxy_password": "password", "proxy_port": 3128,
                "id": "proxy" },
             "id": "iso_importer"},
            # This one has only the configs that were changed set
            {'repo_id': 'test', 'importer_type_id': 'iso_importer',
             'config': {
                'feed_url': 'http://feed.com/isos', 'num_threads': 42,
                'proxy_url': 'proxy.com', 'proxy_user': 'jeeves',
                'remove_missing_units': False, 'validate_units': True},
             'id': 'iso_importer'},
            # This is here just to make sure we ignore it with our query, since this
            # migration should only alter ISOImporters
            {'repo_id': 'a_yum_repo', 'importer_type_id': 'yum_importer',
             'config': {'feed_url': 'This should not change.'}},
        )

        for importer in importers:
            self.repo_importers.save(importer, safe=True)
Author: bechtoldt | Project: pulp_rpm | Lines: 29 | Source: test_0009_iso_importer_config_keys.py


Example 19: test_drop_indexes

    def test_drop_indexes(self):
        """
        Tests updating indexes on an existing collection with different indexes correctly changes them.
        """

        # Setup
        old_key = ['compound_1', 'compound_2']

        type_def = TypeDefinition('rpm', 'RPM', 'RPM Packages', old_key, None, [])
        types_db._update_unit_key(type_def)

        # Test
        new_key = ['new_1']
        type_def.unit_key = new_key

        types_db._drop_indexes(type_def)
        types_db._update_unit_key(type_def)

        # Verify
        collection_name = types_db.unit_collection_name(type_def.id)
        collection = pulp_db.get_collection(collection_name)

        index_dict = collection.index_information()

        self.assertEqual(2, len(index_dict)) # default (_id) + new one
Author: ashcrow | Project: pulp | Lines: 25 | Source: test_types_database.py


Example 20: test_update_unit_key_single_field

    def test_update_unit_key_single_field(self):
        """
        Tests a single field unit key is handled correctly.
        """

        # Setup
        unit_key = 'individual_1',
        type_def = TypeDefinition('rpm', 'RPM', 'RPM Packages', unit_key, None, [])

        # Test
        types_db._update_unit_key(type_def)

        # Verify
        collection_name = types_db.unit_collection_name(type_def.id)
        collection = pulp_db.get_collection(collection_name)

        index_dict = collection.index_information()

        self.assertEqual(2, len(index_dict)) # default (_id) + unit key

        index = index_dict['individual_1_1']
        self.assertTrue(index['unique'])

        keys = index['key']
        self.assertEqual(1, len(keys))
        self.assertEqual('individual_1', keys[0][0])
        self.assertEqual(types_db.ASCENDING, keys[0][1])
Author: ashcrow | Project: pulp | Lines: 27 | Source: test_types_database.py



Note: the pulp.server.db.connection.get_collection examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other source-code and documentation hosting platforms. The snippets were selected from open-source projects contributed by various developers; copyright remains with the original authors, and distribution or use of the code should follow each project's license. Do not reproduce this article without permission.

