2929from sys import exit
3030from docopt import docopt
3131
# Current release of the tool; advertised in the User-Agent header below.
__version__ = '1.1.0'

# GitHub REST API endpoints; the '{}' slots are filled with user/repo
# via str.format() by the callers.
GH_API_BASE_URL = 'https://api.github.com'
GH_REPO_ENDPOINT = GH_API_BASE_URL + '/repos/{}/{}'
GH_REPO_CONTENTS_ENDPOINT = GH_API_BASE_URL + '/repos/{}/{}/contents'
# Strips any leading "...github.com/" prefix from a pasted repository URL.
BASE_NORMALIZE_REGEX = re.compile(r'.*github\.com\/')

# One shared HTTP session so every request carries the same headers
# (User-Agent, and an Authorization token when main() adds one).
req = requests.Session()
req.headers.update({'User-Agent': 'git.io/ghclone ' + __version__})
def exit_with_m(m='An error occurred', e=None):
    """
    Print the error message *m* (and the underlying exception *e*, if
    given), then terminate the process with exit status 1.

    The optional *e* parameter is backward-compatible: other functions
    in this file call ``exit_with_m(msg, exc)`` with two arguments,
    which the previous one-parameter signature rejected with a
    TypeError.
    """
    print(m)
    if e is not None:
        print(e)
    exit(1)
4345
46+
def mkdir_p(path):
    """
    Create directory *path* and any missing parents, like ``mkdir -p``.

    It is not an error if *path* already exists as a directory; an
    OSError is still raised if creation fails for any other reason
    (e.g. *path* exists as a regular file, or permission is denied).
    """
    # Python 3's exist_ok replaces the classic try/except-EEXIST idiom.
    os.makedirs(path, exist_ok=True)
56+
def clone_file(download_url, file_path):
    """
    Download the file at *download_url* and write it to *file_path*.

    Exits the process with a message if the HTTP request fails.
    """
    r = req.get(download_url)
    try:
        r.raise_for_status()
    except Exception as e:
        # Fold the exception into a single message: exit_with_m's
        # original signature accepts only one argument, so passing the
        # exception as a second positional arg raised a TypeError.
        exit_with_m('Failed to clone {}: {}'.format(download_url, e))

    # Stream the body to disk in small chunks rather than holding the
    # whole file in memory.
    with open(file_path, 'wb') as fd:
        for chunk in r.iter_content(chunk_size=128):
            fd.write(chunk)
71+
def clone(base_url, path=None, ref=None):
    """
    Recursively clone the repository contents rooted at *path*.

    base_url -- the repo's GitHub contents-API URL
    path     -- subdirectory to clone, or None for the repo root
    ref      -- branch/tag/commit to clone from, or None for the default
    """
    if path:
        req_url = os.path.join(base_url, path)
        # Mirror the remote directory locally before descending into it
        mkdir_p(path)
    else:
        req_url = base_url

    # Fetch metadata for the current path (optionally pinned to a ref)
    r = req.get(req_url) if not ref else req.get(req_url, params={'ref': ref})
    try:
        r.raise_for_status()
    except Exception as e:
        # path may be None when cloning the repo root, so don't
        # concatenate it directly; fall back to the request URL.
        exit_with_m('Failed to fetch metadata for {}: {}'.format(path or req_url, e))
    repo_data = r.json()

    # The contents endpoint returns a list for a directory but a single
    # dict for a lone file — normalize so the loop below never iterates
    # over dict keys by accident.
    if isinstance(repo_data, dict):
        repo_data = [repo_data]

    # Recursively clone content
    for item in repo_data:
        if item['type'] == 'dir':
            # Fetch dir recursively
            clone(base_url, item['path'], ref)
        else:
            # Fetch the file
            clone_file(item['download_url'], item['path'])
93100
94101
95102###
@@ -105,17 +112,31 @@ def main():
105112 gh_url = arguments ['<url>' ]
106113 token = arguments ['--token' ]
107114 if token :
108- req .headers .update ({'Authorization' : 'token ' + token [0 ]})
115+ req .headers .update ({'Authorization' : 'token ' + token [0 ]})
109116 # Normalize & parse input
110- normal_gh_url = re .sub (BASE_NORMALIZE_REGEX , '' , gh_url ).replace ('/tree' , '' )
111- gh_url_comps = normal_gh_url .split ('/' )
112- user , repo = gh_url_comps [:2 ]
113- ref = gh_url_comps [2 ]
114- path = os .path .join (* gh_url_comps [3 :])
117+ normal_gh_url = re .sub (BASE_NORMALIZE_REGEX , '' ,
118+ gh_url ).replace ('/tree' , '' )
119+ gh_args = normal_gh_url .split ('/' )
120+ user , repo = gh_args [:2 ]
121+
122+ if len (gh_args ) > 2 :
123+ # Clone subdirectory
124+ ref = gh_args [2 ]
125+ path = os .path .join (* gh_args [3 :])
126+ print ("Cloning into '%s'..." % path )
127+ else :
128+ # Clone entire repo
129+ mkdir_p (repo )
130+ os .chdir (repo )
131+ ref = None
132+ path = None
133+ print ("Cloning into '%s'..." % repo )
134+
115135 api_req_url = GH_REPO_CONTENTS_ENDPOINT .format (user , repo )
116- print ( "Cloning into '%s'..." % path )
136+
117137 clone (api_req_url , path , ref )
118138 print ("done." )
119139
140+
# Script entry point: run main() only when executed directly, not on import.
120141if __name__ == '__main__' :
121142 main ()
0 commit comments