Merge branch 'tb/incremental-midx-part-3.1' into ps/remove-packfile-store-get-packs

* tb/incremental-midx-part-3.1: (64 commits)
  builtin/repack.c: clean up unused `#include`s
  repack: move `write_cruft_pack()` out of the builtin
  repack: move `write_filtered_pack()` out of the builtin
  repack: move `pack_kept_objects` to `struct pack_objects_args`
  repack: move `finish_pack_objects_cmd()` out of the builtin
  builtin/repack.c: pass `write_pack_opts` to `finish_pack_objects_cmd()`
  repack: extract `write_pack_opts_is_local()`
  repack: move `find_pack_prefix()` out of the builtin
  builtin/repack.c: use `write_pack_opts` within `write_cruft_pack()`
  builtin/repack.c: introduce `struct write_pack_opts`
  repack: remove 'write_midx_included_packs' API from the builtin
  builtin/repack.c: inline packs within `write_midx_included_packs()`
  builtin/repack.c: pass `repack_write_midx_opts` to `midx_included_packs`
  builtin/repack.c: inline `remove_redundant_bitmaps()`
  builtin/repack.c: reorder `remove_redundant_bitmaps()`
  repack: keep track of MIDX pack names using existing_packs
  builtin/repack.c: use a string_list for 'midx_pack_names'
  builtin/repack.c: extract opts struct for 'write_midx_included_packs()'
  builtin/repack.c: remove ref snapshotting from builtin
  repack: remove pack_geometry API from the builtin
  ...
Junio C Hamano
2025-10-16 14:42:27 -07:00
40 changed files with 1858 additions and 1528 deletions
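Every hunk below follows the same conversion: direct packfile helpers such as get_all_packs(), get_packed_git(), get_packed_git_mru() and reprepare_packed_git() become accessors on the packfile store reachable as repo->objects->packfiles, and repreparing moves to odb_reprepare() on the object database. A minimal sketch of the new calling convention follows; it is illustrative only and not taken from the diff. The accessors and struct fields are the ones shown in the hunks, while the wrapper function and the header names are assumptions:

/*
 * Illustrative only: walk every pack known to the repository's packfile
 * store, the pattern the converted loops below use.
 */
#include "git-compat-util.h"
#include "repository.h"
#include "odb.h"
#include "packfile.h"

static unsigned long count_local_packs(struct repository *repo)
{
	struct packfile_store *packs = repo->objects->packfiles;
	struct packed_git *p;
	unsigned long nr = 0;

	/* Pick up packfiles that appeared since the store was last primed. */
	odb_reprepare(repo->objects);

	for (p = packfile_store_get_all_packs(packs); p; p = p->next)
		if (p->pack_local)
			nr++;

	return nr;
}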


@@ -53,7 +53,7 @@ static void download_batch(struct backfill_context *ctx)
 	 * We likely have a new packfile. Add it to the packed list to
 	 * avoid possible duplicate downloads of the same objects.
 	 */
-	reprepare_packed_git(ctx->repo);
+	odb_reprepare(ctx->repo->objects);
 }
 static int fill_missing_blobs(const char *path UNUSED,


@@ -852,9 +852,10 @@ static void batch_each_object(struct batch_options *opt,
 	if (bitmap && !for_each_bitmapped_object(bitmap, &opt->objects_filter,
 						 batch_one_object_bitmapped, &payload)) {
+		struct packfile_store *packs = the_repository->objects->packfiles;
 		struct packed_git *pack;
-		for (pack = get_all_packs(the_repository); pack; pack = pack->next) {
+		for (pack = packfile_store_get_all_packs(packs); pack; pack = pack->next) {
 			if (bitmap_index_contains_pack(bitmap, pack) ||
 			    open_pack_index(pack))
 				continue;


@@ -122,6 +122,7 @@ int cmd_count_objects(int argc,
 				      count_loose, count_cruft, NULL, NULL);
 	if (verbose) {
+		struct packfile_store *packs = the_repository->objects->packfiles;
 		struct packed_git *p;
 		unsigned long num_pack = 0;
 		off_t size_pack = 0;
@@ -129,7 +130,7 @@ int cmd_count_objects(int argc,
 		struct strbuf pack_buf = STRBUF_INIT;
 		struct strbuf garbage_buf = STRBUF_INIT;
-		for (p = get_all_packs(the_repository); p; p = p->next) {
+		for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
 			if (!p->pack_local)
 				continue;
 			if (open_pack_index(p))


@@ -897,11 +897,11 @@ static void end_packfile(void)
 		idx_name = keep_pack(create_index());
 		/* Register the packfile with core git's machinery. */
-		new_p = add_packed_git(pack_data->repo, idx_name, strlen(idx_name), 1);
+		new_p = packfile_store_load_pack(pack_data->repo->objects->packfiles,
+						 idx_name, 1);
 		if (!new_p)
 			die("core git rejected index %s", idx_name);
 		all_packs[pack_id] = new_p;
-		install_packed_git(the_repository, new_p);
 		free(idx_name);
 		/* Print the boundary */
@@ -952,6 +952,7 @@ static int store_object(
 	struct object_id *oidout,
 	uintmax_t mark)
 {
+	struct packfile_store *packs = the_repository->objects->packfiles;
 	void *out, *delta;
 	struct object_entry *e;
 	unsigned char hdr[96];
@@ -975,7 +976,7 @@ static int store_object(
 	if (e->idx.offset) {
 		duplicate_count_by_type[type]++;
 		return 1;
-	} else if (find_oid_pack(&oid, get_all_packs(the_repository))) {
+	} else if (find_oid_pack(&oid, packfile_store_get_all_packs(packs))) {
 		e->type = type;
 		e->pack_id = MAX_PACK_ID;
 		e->idx.offset = 1; /* just not zero! */
@@ -1092,6 +1093,7 @@ static void truncate_pack(struct hashfile_checkpoint *checkpoint)
 static void stream_blob(uintmax_t len, struct object_id *oidout, uintmax_t mark)
 {
+	struct packfile_store *packs = the_repository->objects->packfiles;
 	size_t in_sz = 64 * 1024, out_sz = 64 * 1024;
 	unsigned char *in_buf = xmalloc(in_sz);
 	unsigned char *out_buf = xmalloc(out_sz);
@@ -1175,7 +1177,7 @@ static void stream_blob(uintmax_t len, struct object_id *oidout, uintmax_t mark)
 		duplicate_count_by_type[OBJ_BLOB]++;
 		truncate_pack(&checkpoint);
-	} else if (find_oid_pack(&oid, get_all_packs(the_repository))) {
+	} else if (find_oid_pack(&oid, packfile_store_get_all_packs(packs))) {
 		e->type = OBJ_BLOB;
 		e->pack_id = MAX_PACK_ID;
 		e->idx.offset = 1; /* just not zero! */


@@ -867,19 +867,20 @@ static int mark_packed_for_connectivity(const struct object_id *oid,
 static int check_pack_rev_indexes(struct repository *r, int show_progress)
 {
+	struct packfile_store *packs = r->objects->packfiles;
 	struct progress *progress = NULL;
 	uint32_t pack_count = 0;
 	int res = 0;
 	if (show_progress) {
-		for (struct packed_git *p = get_all_packs(r); p; p = p->next)
+		for (struct packed_git *p = packfile_store_get_all_packs(packs); p; p = p->next)
 			pack_count++;
 		progress = start_delayed_progress(the_repository,
 						  "Verifying reverse pack-indexes", pack_count);
 		pack_count = 0;
 	}
-	for (struct packed_git *p = get_all_packs(r); p; p = p->next) {
+	for (struct packed_git *p = packfile_store_get_all_packs(packs); p; p = p->next) {
 		int load_error = load_pack_revindex_from_disk(p);
 		if (load_error < 0) {
@@ -999,6 +1000,8 @@ int cmd_fsck(int argc,
 		for_each_packed_object(the_repository,
 				       mark_packed_for_connectivity, NULL, 0);
 	} else {
+		struct packfile_store *packs = the_repository->objects->packfiles;
 		odb_prepare_alternates(the_repository->objects);
 		for (source = the_repository->objects->sources; source; source = source->next)
 			fsck_source(source);
@@ -1009,7 +1012,7 @@ int cmd_fsck(int argc,
 			struct progress *progress = NULL;
 			if (show_progress) {
-				for (p = get_all_packs(the_repository); p;
+				for (p = packfile_store_get_all_packs(packs); p;
 				     p = p->next) {
 					if (open_pack_index(p))
 						continue;
@@ -1019,7 +1022,7 @@ int cmd_fsck(int argc,
 				progress = start_progress(the_repository,
 							  _("Checking objects"), total);
 			}
-			for (p = get_all_packs(the_repository); p;
+			for (p = packfile_store_get_all_packs(packs); p;
 			     p = p->next) {
 				/* verify gives error messages itself */
 				if (verify_pack(the_repository,


@@ -487,9 +487,10 @@ static int too_many_loose_objects(struct gc_config *cfg)
 static struct packed_git *find_base_packs(struct string_list *packs,
 					  unsigned long limit)
 {
+	struct packfile_store *packfiles = the_repository->objects->packfiles;
 	struct packed_git *p, *base = NULL;
-	for (p = get_all_packs(the_repository); p; p = p->next) {
+	for (p = packfile_store_get_all_packs(packfiles); p; p = p->next) {
 		if (!p->pack_local || p->is_cruft)
 			continue;
 		if (limit) {
@@ -508,13 +509,14 @@ static struct packed_git *find_base_packs(struct string_list *packs,
 static int too_many_packs(struct gc_config *cfg)
 {
+	struct packfile_store *packs = the_repository->objects->packfiles;
 	struct packed_git *p;
 	int cnt;
 	if (cfg->gc_auto_pack_limit <= 0)
 		return 0;
-	for (cnt = 0, p = get_all_packs(the_repository); p; p = p->next) {
+	for (cnt = 0, p = packfile_store_get_all_packs(packs); p; p = p->next) {
 		if (!p->pack_local)
 			continue;
 		if (p->pack_keep)
@@ -1042,7 +1044,7 @@ int cmd_gc(int argc,
 		die(FAILED_RUN, "rerere");
 	report_garbage = report_pack_garbage;
-	reprepare_packed_git(the_repository);
+	odb_reprepare(the_repository->objects);
 	if (pack_garbage.nr > 0) {
 		close_object_store(the_repository->objects);
 		clean_pack_garbage();
@@ -1423,7 +1425,7 @@ static int incremental_repack_auto_condition(struct gc_config *cfg UNUSED)
 	if (incremental_repack_auto_limit < 0)
 		return 1;
-	for (p = get_packed_git(the_repository);
+	for (p = packfile_store_get_packs(the_repository->objects->packfiles);
 	     count < incremental_repack_auto_limit && p;
 	     p = p->next) {
 		if (!p->multi_pack_index)
@@ -1491,8 +1493,8 @@ static off_t get_auto_pack_size(void)
 	struct packed_git *p;
 	struct repository *r = the_repository;
-	reprepare_packed_git(r);
-	for (p = get_all_packs(r); p; p = p->next) {
+	odb_reprepare(r->objects);
+	for (p = packfile_store_get_all_packs(r->objects->packfiles); p; p = p->next) {
 		if (p->pack_size > max_size) {
 			second_largest_size = max_size;
 			max_size = p->pack_size;


@@ -1214,7 +1214,7 @@ int cmd_grep(int argc,
 		if (recurse_submodules)
 			repo_read_gitmodules(the_repository, 1);
 		if (startup_info->have_repository)
-			(void)get_packed_git(the_repository);
+			(void)packfile_store_get_packs(the_repository->objects->packfiles);
 		start_threads(&opt);
 	} else {
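The grep hunk keeps the (void) call but routes it through the store: calling packfile_store_get_packs() once before start_threads() forces the pack list to be loaded eagerly, presumably so worker threads do not race to initialize it lazily. A small illustrative sketch of that idiom; the wrapper function is an assumption, while the accessor and fields are the ones from the hunk:

/*
 * Illustrative only: prime the pack list once, up front, before any
 * threads start. The return value is deliberately ignored -- the call
 * is made purely for its side effect of scanning for packs.
 */
static void prime_packs_for_threads(struct repository *repo)
{
	(void)packfile_store_get_packs(repo->objects->packfiles);
}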


@@ -1640,13 +1640,9 @@ static void final(const char *final_pack_name, const char *curr_pack_name,
 	rename_tmp_packfile(&final_index_name, curr_index_name, &index_name,
 			    hash, "idx", 1);
-	if (do_fsck_object) {
-		struct packed_git *p;
-		p = add_packed_git(the_repository, final_index_name,
-				   strlen(final_index_name), 0);
-		if (p)
-			install_packed_git(the_repository, p);
-	}
+	if (do_fsck_object)
+		packfile_store_load_pack(the_repository->objects->packfiles,
+					 final_index_name, 0);
 	if (!from_stdin) {
 		printf("%s\n", hash_to_hex(hash));


@@ -1748,12 +1748,12 @@ static int want_object_in_pack_mtime(const struct object_id *oid,
 		}
 	}
-	list_for_each(pos, get_packed_git_mru(the_repository)) {
+	list_for_each(pos, packfile_store_get_packs_mru(the_repository->objects->packfiles)) {
 		struct packed_git *p = list_entry(pos, struct packed_git, mru);
 		want = want_object_in_pack_one(p, oid, exclude, found_pack, found_offset, found_mtime);
 		if (!exclude && want > 0)
 			list_move(&p->mru,
-				  get_packed_git_mru(the_repository));
+				  packfile_store_get_packs_mru(the_repository->objects->packfiles));
 		if (want != -1)
 			return want;
 	}
@@ -3831,6 +3831,7 @@ static int pack_mtime_cmp(const void *_a, const void *_b)
 static void read_packs_list_from_stdin(struct rev_info *revs)
 {
+	struct packfile_store *packs = the_repository->objects->packfiles;
 	struct strbuf buf = STRBUF_INIT;
 	struct string_list include_packs = STRING_LIST_INIT_DUP;
 	struct string_list exclude_packs = STRING_LIST_INIT_DUP;
@@ -3855,7 +3856,7 @@ static void read_packs_list_from_stdin(struct rev_info *revs)
 	string_list_sort(&exclude_packs);
 	string_list_remove_duplicates(&exclude_packs, 0);
-	for (p = get_all_packs(the_repository); p; p = p->next) {
+	for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
 		const char *pack_name = pack_basename(p);
 		if ((item = string_list_lookup(&include_packs, pack_name)))
@@ -4076,6 +4077,7 @@ static void enumerate_cruft_objects(void)
 static void enumerate_and_traverse_cruft_objects(struct string_list *fresh_packs)
 {
+	struct packfile_store *packs = the_repository->objects->packfiles;
 	struct packed_git *p;
 	struct rev_info revs;
 	int ret;
@@ -4105,7 +4107,7 @@ static void enumerate_and_traverse_cruft_objects(struct string_list *fresh_packs
 	 * Re-mark only the fresh packs as kept so that objects in
 	 * unknown packs do not halt the reachability traversal early.
 	 */
-	for (p = get_all_packs(the_repository); p; p = p->next)
+	for (p = packfile_store_get_all_packs(packs); p; p = p->next)
 		p->pack_keep_in_core = 0;
 	mark_pack_kept_in_core(fresh_packs, 1);
@@ -4122,6 +4124,7 @@ static void enumerate_and_traverse_cruft_objects(struct string_list *fresh_packs
 static void read_cruft_objects(void)
 {
+	struct packfile_store *packs = the_repository->objects->packfiles;
 	struct strbuf buf = STRBUF_INIT;
 	struct string_list discard_packs = STRING_LIST_INIT_DUP;
 	struct string_list fresh_packs = STRING_LIST_INIT_DUP;
@@ -4142,7 +4145,7 @@ static void read_cruft_objects(void)
 	string_list_sort(&discard_packs);
 	string_list_sort(&fresh_packs);
-	for (p = get_all_packs(the_repository); p; p = p->next) {
+	for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
 		const char *pack_name = pack_basename(p);
 		struct string_list_item *item;
@@ -4390,11 +4393,12 @@ static void add_unreachable_loose_objects(struct rev_info *revs)
 static int has_sha1_pack_kept_or_nonlocal(const struct object_id *oid)
 {
+	struct packfile_store *packs = the_repository->objects->packfiles;
 	static struct packed_git *last_found = (void *)1;
 	struct packed_git *p;
 	p = (last_found != (void *)1) ? last_found :
-					get_all_packs(the_repository);
+					packfile_store_get_all_packs(packs);
 	while (p) {
 		if ((!p->pack_local || p->pack_keep ||
@@ -4404,7 +4408,7 @@ static int has_sha1_pack_kept_or_nonlocal(const struct object_id *oid)
 			return 1;
 		}
 		if (p == last_found)
-			p = get_all_packs(the_repository);
+			p = packfile_store_get_all_packs(packs);
 		else
 			p = p->next;
 		if (p == last_found)
@@ -4436,12 +4440,13 @@ static int loosened_object_can_be_discarded(const struct object_id *oid,
 static void loosen_unused_packed_objects(void)
 {
+	struct packfile_store *packs = the_repository->objects->packfiles;
 	struct packed_git *p;
 	uint32_t i;
 	uint32_t loosened_objects_nr = 0;
 	struct object_id oid;
-	for (p = get_all_packs(the_repository); p; p = p->next) {
+	for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
 		if (!p->pack_local || p->pack_keep || p->pack_keep_in_core)
 			continue;
@@ -4742,12 +4747,13 @@ static void get_object_list(struct rev_info *revs, struct strvec *argv)
 static void add_extra_kept_packs(const struct string_list *names)
 {
+	struct packfile_store *packs = the_repository->objects->packfiles;
 	struct packed_git *p;
 	if (!names->nr)
 		return;
-	for (p = get_all_packs(the_repository); p; p = p->next) {
+	for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
 		const char *name = basename(p->pack_name);
 		int i;
@@ -5185,8 +5191,10 @@ int cmd_pack_objects(int argc,
 	add_extra_kept_packs(&keep_pack_list);
 	if (ignore_packed_keep_on_disk) {
+		struct packfile_store *packs = the_repository->objects->packfiles;
 		struct packed_git *p;
-		for (p = get_all_packs(the_repository); p; p = p->next)
+		for (p = packfile_store_get_all_packs(packs); p; p = p->next)
 			if (p->pack_local && p->pack_keep)
 				break;
 		if (!p) /* no keep-able packs found */
@@ -5198,8 +5206,10 @@ int cmd_pack_objects(int argc,
 		 * want to unset "local" based on looking at packs, as
 		 * it also covers non-local objects
 		 */
+		struct packfile_store *packs = the_repository->objects->packfiles;
 		struct packed_git *p;
-		for (p = get_all_packs(the_repository); p; p = p->next) {
+		for (p = packfile_store_get_all_packs(packs); p; p = p->next) {
 			if (!p->pack_local) {
 				have_non_local_packs = 1;
 				break;
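The first pack-objects hunk above (want_object_in_pack_mtime) shows the other access path: instead of the plain pack list, it walks the store's most-recently-used list via packfile_store_get_packs_mru() and bumps a pack back to the front with list_move() on a hit. A compact illustrative sketch of that lookup pattern; the wrapper function and the use of find_pack_entry_one() are assumptions, while the MRU accessor, the list helpers and the mru field are from the hunk:

/*
 * Illustrative only: find a pack containing oid, preferring packs that
 * served recent lookups, and keep the hit at the front of the MRU list.
 */
static struct packed_git *find_pack_mru(struct repository *repo,
					const struct object_id *oid)
{
	struct packfile_store *packs = repo->objects->packfiles;
	struct list_head *pos;

	list_for_each(pos, packfile_store_get_packs_mru(packs)) {
		struct packed_git *p = list_entry(pos, struct packed_git, mru);

		if (find_pack_entry_one(oid, p)) {
			list_move(&p->mru, packfile_store_get_packs_mru(packs));
			return p;
		}
	}
	return NULL;
}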


@@ -566,7 +566,8 @@ static struct pack_list * add_pack(struct packed_git *p)
 static struct pack_list * add_pack_file(const char *filename)
 {
-	struct packed_git *p = get_all_packs(the_repository);
+	struct packfile_store *packs = the_repository->objects->packfiles;
+	struct packed_git *p = packfile_store_get_all_packs(packs);
 	if (strlen(filename) < 40)
 		die("Bad pack filename: %s", filename);
@@ -581,7 +582,8 @@ static struct pack_list * add_pack_file(const char *filename)
 static void load_all(void)
 {
-	struct packed_git *p = get_all_packs(the_repository);
+	struct packfile_store *packs = the_repository->objects->packfiles;
+	struct packed_git *p = packfile_store_get_all_packs(packs);
 	while (p) {
 		add_pack(p);


@@ -2389,7 +2389,7 @@ static const char *unpack(int err_fd, struct shallow_info *si)
 		status = finish_command(&child);
 		if (status)
 			return "index-pack abnormal exit";
-		reprepare_packed_git(the_repository);
+		odb_reprepare(the_repository->objects);
 	}
 	return NULL;
 }

File diff suppressed because it is too large