+use async_recursion::async_recursion;
+use async_trait::async_trait;
+use futures::StreamExt;
 use std::{
     collections::{HashMap, HashSet},
     path::PathBuf,
@@ -7,9 +10,6 @@ use std::{
         atomic::{AtomicUsize, Ordering},
     },
 };
-
-use async_trait::async_trait;
-use futures::StreamExt;
 use tokio::sync::{
     Mutex,
     mpsc::{self},
@@ -18,6 +18,7 @@ use tokio_stream::wrappers::ReceiverStream;
 
 use callisto::{mega_tree, raw_blob, sea_orm_active_enums::RefTypeEnum};
 use common::errors::MegaError;
+use git_internal::internal::metadata::{EntryMeta, MetaAttached};
 use git_internal::{
     errors::GitError,
     internal::{
@@ -65,14 +66,23 @@ impl RepoHandler for ImportRepo {
 
     async fn post_receive_pack(&self) -> Result<(), MegaError> {
         let _guard = self.shared.lock().await;
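+        // Walk the imported trees to record each blob's file path before the repo is attached to its monorepo parent.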
+        self.traverses_tree_and_update_filepath().await?;
         self.attach_to_monorepo_parent().await
     }
 
-    async fn save_entry(&self, entry_list: Vec<Entry>) -> Result<(), MegaError> {
+    async fn save_entry(
+        &self,
+        entry_list: Vec<MetaAttached<Entry, EntryMeta>>,
+    ) -> Result<(), MegaError> {
         let storage = self.storage.git_db_storage();
         storage.save_entry(self.repo.repo_id, entry_list).await
     }
 
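+    // Delegate to git storage to replace the temporary pack id recorded for entries with the final pack id.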
+    async fn update_pack_id(&self, temp_pack_id: &str, pack_id: &str) -> Result<(), MegaError> {
+        let storage = self.storage.git_db_storage();
+        storage.update_pack_id(temp_pack_id, pack_id).await
+    }
+
     async fn check_entry(&self, _: &Entry) -> Result<(), GitError> {
         Ok(())
     }
@@ -96,7 +106,10 @@ impl RepoHandler for ImportRepo {
             match model {
                 Ok(m) => {
                     let c: Commit = Commit::from_git_model(m);
-                    let entry = c.into();
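+                    // Commits get a default EntryMeta; only blobs carry pack metadata here.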
+                    let entry = MetaAttached {
+                        inner: c.into(),
+                        meta: EntryMeta::new(),
+                    };
                     entry_tx.send(entry).await.unwrap();
                 }
                 Err(err) => eprintln!("Error: {err:?}"),
@@ -109,7 +122,10 @@ impl RepoHandler for ImportRepo {
             match model {
                 Ok(m) => {
                     let t: Tree = Tree::from_git_model(m);
-                    let entry = t.into();
+                    let entry = MetaAttached {
+                        inner: t.into(),
+                        meta: EntryMeta::new(),
+                    };
                     entry_tx.send(entry).await.unwrap();
                 }
                 Err(err) => eprintln!("Error: {err:?}"),
@@ -139,7 +155,26 @@ impl RepoHandler for ImportRepo {
                     // TODO handle storage type
                     let data = m.data.unwrap_or_default();
                     let b: Blob = Blob::from_content_bytes(data);
-                    let entry: Entry = b.into();
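+                    // Fetch the pack metadata (pack id, offset, file path, delta flag) stored for this blob.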
+                    let blob_with_data = storage
+                        .get_blobs_by_hashes(repo_id, vec![b.id.to_string()])
+                        .await
+                        .expect("get_blobs_by_hashes failed")
+                        .into_iter()
+                        .next()
+                        .expect("blob metadata not found");
+
+                    let meta_data = EntryMeta {
+                        pack_id: Some(blob_with_data.pack_id.clone()),
+                        pack_offset: Some(blob_with_data.pack_offset as usize),
+                        file_path: Some(blob_with_data.file_path.clone()),
+                        is_delta: Some(blob_with_data.is_delta_in_pack),
+                    };
+
+                    let entry = MetaAttached {
+                        inner: b.into(),
+                        meta: meta_data,
+                    };
                     sender_clone.send(entry).await.unwrap();
                 }
                 Err(err) => eprintln!("Error: {err:?}"),
@@ -154,7 +189,10 @@ impl RepoHandler for ImportRepo {
             let tags = storage.get_tags_by_repo_id(repo_id).await.unwrap();
             for m in tags.into_iter() {
                 let c: Tag = Tag::from_git_model(m);
-                let entry: Entry = c.into();
+                let entry = MetaAttached {
+                    inner: c.into(),
+                    meta: EntryMeta::new(),
+                };
                 entry_tx.send(entry).await.unwrap();
             }
             drop(entry_tx);
@@ -256,7 +294,13 @@ impl RepoHandler for ImportRepo {
                 Some(&entry_tx),
             )
             .await;
-            entry_tx.send(c.into()).await.unwrap();
+            entry_tx
+                .send(MetaAttached {
+                    inner: c.into(),
+                    meta: EntryMeta::new(),
+                })
+                .await
+                .unwrap();
         }
         drop(entry_tx);
 
@@ -285,6 +329,34 @@ impl RepoHandler for ImportRepo {
             .await
     }
 
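+    // Batch-load pack metadata for the given blob hashes, keyed by blob id.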
+    async fn get_blob_metadata_by_hashes(
+        &self,
+        hashes: Vec<String>,
+    ) -> Result<HashMap<String, EntryMeta>, MegaError> {
+        let models = self
+            .storage
+            .git_db_storage()
+            .get_blobs_by_hashes(self.repo.repo_id, hashes)
+            .await?;
+
+        let map = models
+            .into_iter()
+            .map(|blob| {
+                (
+                    blob.blob_id.clone(),
+                    EntryMeta {
+                        pack_id: Some(blob.pack_id.clone()),
+                        pack_offset: Some(blob.pack_offset as usize),
+                        file_path: Some(blob.file_path.clone()),
+                        is_delta: Some(blob.is_delta_in_pack),
+                    },
+                )
+            })
+            .collect::<HashMap<String, EntryMeta>>();
+
+        Ok(map)
+    }
+
     async fn update_refs(&self, refs: &RefCommand) -> Result<(), GitError> {
         let storage = self.storage.git_db_storage();
         match refs.command_type {
@@ -324,9 +396,65 @@ impl RepoHandler for ImportRepo {
             .await
             .unwrap()
     }
+
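+    // Resolve HEAD to its commit and root tree, then walk the tree to fill in each blob's file path.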
+    async fn traverses_tree_and_update_filepath(&self) -> Result<(), MegaError> {
+        let (current_head, _refs) = self.refs_with_head_hash().await;
+        let commit = Commit::from_git_model(
+            self.storage
+                .git_db_storage()
+                .get_commit_by_hash(self.repo.repo_id, &current_head)
+                .await?
+                .unwrap(),
+        );
+
+        let root_tree = Tree::from_git_model(
+            self.storage
+                .git_db_storage()
+                .get_tree_by_hash(self.repo.repo_id, &commit.tree_id.to_string())
+                .await?
+                .unwrap()
+                .clone(),
+        );
+        self.traverses_and_update_filepath(root_tree, PathBuf::new())
+            .await?;
+        Ok(())
+    }
 }
 
 impl ImportRepo {
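+    // Depth-first traversal: recurse into subtrees and record the accumulated path for every blob entry.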
+    #[async_recursion]
+    async fn traverses_and_update_filepath(
+        &self,
+        tree: Tree,
+        path: PathBuf,
+    ) -> Result<(), MegaError> {
+        for item in tree.tree_items {
+            if item.is_tree() {
+                let tree = Tree::from_git_model(
+                    self.storage
+                        .git_db_storage()
+                        .get_tree_by_hash(self.repo.repo_id, &item.id.to_string())
+                        .await?
+                        .unwrap()
+                        .clone(),
+                );
+
+                // Recurse into the subtree
+                self.traverses_and_update_filepath(tree, path.join(item.name))
+                    .await?;
+            } else {
+                let id = item.id.to_string();
+                self.storage
+                    .git_db_storage()
+                    .update_git_blob_filepath(&id, path.join(item.name).to_str().unwrap())
+                    .await?;
+            }
+        }
+
+        Ok(())
+    }
+
     // attach import repo to monorepo parent tree
     pub(crate) async fn attach_to_monorepo_parent(&self) -> Result<(), MegaError> {
         // 1. find branch command
@@ -379,7 +507,7 @@ impl ImportRepo {
         let save_trees: Vec<mega_tree::ActiveModel> = save_trees
             .into_iter()
             .map(|tree| {
-                let mut model: mega_tree::Model = tree.into_mega_model();
+                let mut model: mega_tree::Model = tree.into_mega_model(EntryMeta::new());
                 model.commit_id = new_commit.id.to_string();
                 model.into()
             })