if (bitmap && !for_each_bitmapped_object(bitmap, &opt->objects_filter,
batch_one_object_bitmapped, &payload)) {
- struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *pack;
- for (pack = packfile_store_get_all_packs(packs); pack; pack = pack->next) {
+ repo_for_each_pack(the_repository, pack) {
if (bitmap_index_contains_pack(bitmap, pack) ||
open_pack_index(pack))
continue;
count_loose, count_cruft, NULL, NULL);
if (verbose) {
- struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *p;
unsigned long num_pack = 0;
off_t size_pack = 0;
struct strbuf pack_buf = STRBUF_INIT;
struct strbuf garbage_buf = STRBUF_INIT;
- for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
+ repo_for_each_pack(the_repository, p) {
if (!p->pack_local)
continue;
if (open_pack_index(p))
static int check_pack_rev_indexes(struct repository *r, int show_progress)
{
- struct packfile_store *packs = r->objects->packfiles;
struct progress *progress = NULL;
+ struct packed_git *p;
uint32_t pack_count = 0;
int res = 0;
if (show_progress) {
- for (struct packed_git *p = packfile_store_get_all_packs(packs); p; p = p->next)
+ repo_for_each_pack(r, p)
pack_count++;
progress = start_delayed_progress(the_repository,
"Verifying reverse pack-indexes", pack_count);
pack_count = 0;
}
- for (struct packed_git *p = packfile_store_get_all_packs(packs); p; p = p->next) {
+ repo_for_each_pack(r, p) {
int load_error = load_pack_revindex_from_disk(p);
if (load_error < 0) {
for_each_packed_object(the_repository,
mark_packed_for_connectivity, NULL, 0);
} else {
- struct packfile_store *packs = the_repository->objects->packfiles;
-
odb_prepare_alternates(the_repository->objects);
for (source = the_repository->objects->sources; source; source = source->next)
fsck_source(source);
struct progress *progress = NULL;
if (show_progress) {
- for (p = packfile_store_get_all_packs(packs); p;
- p = p->next) {
+ repo_for_each_pack(the_repository, p) {
if (open_pack_index(p))
continue;
total += p->num_objects;
progress = start_progress(the_repository,
_("Checking objects"), total);
}
- for (p = packfile_store_get_all_packs(packs); p;
- p = p->next) {
+
+ repo_for_each_pack(the_repository, p) {
/* verify gives error messages itself */
if (verify_pack(the_repository,
p, fsck_obj_buffer,
static struct packed_git *find_base_packs(struct string_list *packs,
unsigned long limit)
{
- struct packfile_store *packfiles = the_repository->objects->packfiles;
struct packed_git *p, *base = NULL;
- for (p = packfile_store_get_all_packs(packfiles); p; p = p->next) {
+ repo_for_each_pack(the_repository, p) {
if (!p->pack_local || p->is_cruft)
continue;
if (limit) {
static int too_many_packs(struct gc_config *cfg)
{
- struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *p;
- int cnt;
+ int cnt = 0;
if (cfg->gc_auto_pack_limit <= 0)
return 0;
- for (cnt = 0, p = packfile_store_get_all_packs(packs); p; p = p->next) {
+ repo_for_each_pack(the_repository, p) {
if (!p->pack_local)
continue;
if (p->pack_keep)
if (incremental_repack_auto_limit < 0)
return 1;
- for (p = packfile_store_get_all_packs(the_repository->objects->packfiles);
- count < incremental_repack_auto_limit && p;
- p = p->next) {
+ repo_for_each_pack(the_repository, p) {
+ if (count >= incremental_repack_auto_limit)
+ break;
if (!p->multi_pack_index)
count++;
}
struct repository *r = the_repository;
odb_reprepare(r->objects);
- for (p = packfile_store_get_all_packs(r->objects->packfiles); p; p = p->next) {
+ repo_for_each_pack(r, p) {
if (p->pack_size > max_size) {
second_largest_size = max_size;
max_size = p->pack_size;
static void read_packs_list_from_stdin(struct rev_info *revs)
{
- struct packfile_store *packs = the_repository->objects->packfiles;
struct strbuf buf = STRBUF_INIT;
struct string_list include_packs = STRING_LIST_INIT_DUP;
struct string_list exclude_packs = STRING_LIST_INIT_DUP;
struct string_list_item *item = NULL;
-
struct packed_git *p;
while (strbuf_getline(&buf, stdin) != EOF) {
string_list_sort(&exclude_packs);
string_list_remove_duplicates(&exclude_packs, 0);
- for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
+ repo_for_each_pack(the_repository, p) {
const char *pack_name = pack_basename(p);
if ((item = string_list_lookup(&include_packs, pack_name)))
static void enumerate_and_traverse_cruft_objects(struct string_list *fresh_packs)
{
- struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *p;
struct rev_info revs;
int ret;
* Re-mark only the fresh packs as kept so that objects in
* unknown packs do not halt the reachability traversal early.
*/
- for (p = packfile_store_get_all_packs(packs); p; p = p->next)
+ repo_for_each_pack(the_repository, p)
p->pack_keep_in_core = 0;
mark_pack_kept_in_core(fresh_packs, 1);
static void read_cruft_objects(void)
{
- struct packfile_store *packs = the_repository->objects->packfiles;
struct strbuf buf = STRBUF_INIT;
struct string_list discard_packs = STRING_LIST_INIT_DUP;
struct string_list fresh_packs = STRING_LIST_INIT_DUP;
string_list_sort(&discard_packs);
string_list_sort(&fresh_packs);
- for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
+ repo_for_each_pack(the_repository, p) {
const char *pack_name = pack_basename(p);
struct string_list_item *item;
static void loosen_unused_packed_objects(void)
{
- struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *p;
uint32_t i;
uint32_t loosened_objects_nr = 0;
struct object_id oid;
- for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
+ repo_for_each_pack(the_repository, p) {
if (!p->pack_local || p->pack_keep || p->pack_keep_in_core)
continue;
static void add_extra_kept_packs(const struct string_list *names)
{
- struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *p;
if (!names->nr)
return;
- for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
+ repo_for_each_pack(the_repository, p) {
const char *name = basename(p->pack_name);
int i;
add_extra_kept_packs(&keep_pack_list);
if (ignore_packed_keep_on_disk) {
- struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *p;
- for (p = packfile_store_get_all_packs(packs); p; p = p->next)
+ repo_for_each_pack(the_repository, p)
if (p->pack_local && p->pack_keep)
break;
if (!p) /* no keep-able packs found */
* want to unset "local" based on looking at packs, as
* it also covers non-local objects
*/
- struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *p;
- for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
+ repo_for_each_pack(the_repository, p) {
if (!p->pack_local) {
have_non_local_packs = 1;
break;
static struct pack_list * add_pack_file(const char *filename)
{
- struct packfile_store *packs = the_repository->objects->packfiles;
- struct packed_git *p = packfile_store_get_all_packs(packs);
+ struct packed_git *p;
if (strlen(filename) < 40)
die("Bad pack filename: %s", filename);
- while (p) {
+ repo_for_each_pack(the_repository, p)
if (strstr(p->pack_name, filename))
return add_pack(p);
- p = p->next;
- }
die("Filename %s not found in packed_git", filename);
}
static void load_all(void)
{
- struct packfile_store *packs = the_repository->objects->packfiles;
- struct packed_git *p = packfile_store_get_all_packs(packs);
+ struct packed_git *p;
- while (p) {
+ repo_for_each_pack(the_repository, p)
add_pack(p);
- p = p->next;
- }
}
int cmd_pack_redundant(int argc, const char **argv, const char *prefix UNUSED, struct repository *repo UNUSED) {
*/
odb_reprepare(the_repository->objects);
do {
- struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *p;
- for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
+ repo_for_each_pack(the_repository, p) {
if (!p->pack_promisor)
continue;
if (find_pack_entry_one(oid, p))
static void get_info_packs(struct strbuf *hdr, char *arg UNUSED)
{
size_t objdirlen = strlen(repo_get_object_directory(the_repository));
- struct packfile_store *packs = the_repository->objects->packfiles;
struct strbuf buf = STRBUF_INIT;
struct packed_git *p;
size_t cnt = 0;
select_getanyfile(hdr);
- for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
+ repo_for_each_pack(the_repository, p) {
if (p->pack_local)
cnt++;
}
strbuf_grow(&buf, cnt * 53 + 2);
- for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
+ repo_for_each_pack(the_repository, p) {
if (p->pack_local)
strbuf_addf(&buf, "P %s\n", p->pack_name + objdirlen + 6);
}
static int fetch_and_setup_pack_index(struct packed_git **packs_head,
unsigned char *sha1, const char *base_url)
{
- struct packfile_store *packs = the_repository->objects->packfiles;
struct packed_git *new_pack, *p;
char *tmp_idx = NULL;
int ret;
* If we already have the pack locally, no need to fetch its index or
* even add it to list; we already have all of its objects.
*/
- for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
+ repo_for_each_pack(the_repository, p) {
if (hasheq(p->hash, sha1, the_repository->hash_algo))
return 0;
}
unique_in_midx(m, ds);
}
- for (p = packfile_store_get_all_packs(ds->repo->objects->packfiles); p && !ds->ambiguous;
- p = p->next)
+ repo_for_each_pack(ds->repo, p) {
+ if (ds->ambiguous)
+ break;
unique_in_pack(p, ds);
+ }
}
static int finish_object_disambiguation(struct disambiguate_state *ds,
find_abbrev_len_for_midx(m, mad);
}
- for (p = packfile_store_get_all_packs(mad->repo->objects->packfiles); p; p = p->next)
+ repo_for_each_pack(mad->repo, p)
find_abbrev_len_for_pack(p, mad);
}
struct packed_git *p;
int ret = -1;
- for (p = packfile_store_get_all_packs(r->objects->packfiles); p; p = p->next) {
+ repo_for_each_pack(r, p) {
if (open_pack_bitmap_1(bitmap_git, p) == 0) {
ret = 0;
/*
int verify_bitmap_files(struct repository *r)
{
struct odb_source *source;
+ struct packed_git *p;
int res = 0;
odb_prepare_alternates(r->objects);
free(midx_bitmap_name);
}
- for (struct packed_git *p = packfile_store_get_all_packs(r->objects->packfiles);
- p; p = p->next) {
+ repo_for_each_pack(r, p) {
char *pack_bitmap_name = pack_bitmap_filename(p);
res |= verify_bitmap_file(r->hash_algo, pack_bitmap_name);
free(pack_bitmap_name);
static void prepare_in_pack_by_idx(struct packing_data *pdata)
{
- struct packfile_store *packs = pdata->repo->objects->packfiles;
struct packed_git **mapping, *p;
int cnt = 0, nr = 1U << OE_IN_PACK_BITS;
* (i.e. in_pack_idx also zero) should return NULL.
*/
mapping[cnt++] = NULL;
- for (p = packfile_store_get_all_packs(packs); p; p = p->next, cnt++) {
+ repo_for_each_pack(pdata->repo, p) {
if (cnt == nr) {
free(mapping);
return;
}
p->index = cnt;
- mapping[cnt] = p;
+ mapping[cnt++] = p;
}
pdata->in_pack_by_idx = mapping;
}
* covers, one kept and one not kept, but the midx returns only
* the non-kept version.
*/
- for (p = packfile_store_get_all_packs(r->objects->packfiles); p; p = p->next) {
+ repo_for_each_pack(r, p) {
if ((p->pack_keep && (flags & ON_DISK_KEEP_PACKS)) ||
(p->pack_keep_in_core && (flags & IN_CORE_KEEP_PACKS))) {
ALLOC_GROW(packs, nr + 1, alloc);
int r = 0;
int pack_errors = 0;
- for (p = packfile_store_get_all_packs(repo->objects->packfiles); p; p = p->next) {
+ repo_for_each_pack(repo, p) {
if ((flags & FOR_EACH_OBJECT_LOCAL_ONLY) && !p->pack_local)
continue;
if ((flags & FOR_EACH_OBJECT_PROMISOR_ONLY) &&
void packfile_store_add_pack(struct packfile_store *store,
struct packed_git *pack);
+/*
+ * Load and iterate through all packs of the given repository. This helper
+ * macro yields packfiles from all object sources connected to the
+ * repository.
+ */
+#define repo_for_each_pack(repo, p) \
+ for (p = packfile_store_get_all_packs((repo)->objects->packfiles); p; p = p->next)
+
/*
* Get all packs managed by the given store, including packfiles that are
* referenced by multi-pack indices.
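
To illustrate how callers are expected to use the new macro, here is a minimal
sketch (not part of the patch itself; count_local_pack_objects is a hypothetical
name, and the sketch assumes the packfile.h declarations used by the converted
callers above, namely open_pack_index(), pack_local and num_objects):

	static unsigned long count_local_pack_objects(struct repository *repo)
	{
		struct packed_git *p;
		unsigned long total = 0;

		repo_for_each_pack(repo, p) {
			/* skip non-local packs and packs whose index cannot be opened */
			if (!p->pack_local || open_pack_index(p))
				continue;
			total += p->num_objects;
		}

		return total;
	}

Because the macro takes the iteration cursor by name, callers still declare
struct packed_git *p themselves, which is why the conversion adds such
declarations in functions where the old loops declared p inline.
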
static void combine_small_cruft_packs(FILE *in, off_t combine_cruft_below_size,
struct existing_packs *existing)
{
- struct packfile_store *packs = existing->repo->objects->packfiles;
struct packed_git *p;
struct strbuf buf = STRBUF_INIT;
size_t i;
- for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
+ repo_for_each_pack(existing->repo, p) {
if (!(p->is_cruft && p->pack_local))
continue;
struct existing_packs *existing,
const struct pack_objects_args *args)
{
- struct packfile_store *packs = existing->repo->objects->packfiles;
struct packed_git *p;
struct strbuf buf = STRBUF_INIT;
- for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
+ repo_for_each_pack(existing->repo, p) {
if (args->local && !p->pack_local)
/*
* When asked to only repack local packfiles we skip
void existing_packs_collect(struct existing_packs *existing,
const struct string_list *extra_keep)
{
- struct packfile_store *packs = existing->repo->objects->packfiles;
struct packed_git *p;
struct strbuf buf = STRBUF_INIT;
- for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
+ repo_for_each_pack(existing->repo, p) {
size_t i;
const char *base;
static void init_pack_info(struct repository *r, const char *infofile, int force)
{
- struct packfile_store *packs = r->objects->packfiles;
struct packed_git *p;
int stale;
int i;
size_t alloc = 0;
- for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
+ repo_for_each_pack(r, p) {
/* we ignore things on alternate path since they are
* not available to the pullers in general.
*/
if (repo_get_oid(the_repository, argv[0], &oid))
die("cannot parse %s as an object name", argv[0]);
- for (p = packfile_store_get_all_packs(the_repository->objects->packfiles); p; p = p->next)
+ repo_for_each_pack(the_repository, p) {
if (find_pack_entry_one(&oid, p)) {
printf("%s\n", p->pack_name);
actual_count++;
}
+ }
if (count > -1 && count != actual_count)
die("bad packfile count %d instead of %d", actual_count, count);
if (argc != 2)
usage(pack_mtimes_usage);
- for (p = packfile_store_get_all_packs(the_repository->objects->packfiles); p; p = p->next) {
+ repo_for_each_pack(the_repository, p) {
strbuf_addstr(&buf, basename(p->pack_name));
strbuf_strip_suffix(&buf, ".pack");
strbuf_addstr(&buf, ".mtimes");