| 12345678910111213141516171819202122232425262728293031323334353637383940414243 |
- #!/usr/bin/env python
- # -*- coding: utf-8 -*-
- import requests
- from urllib.request import urlretrieve
- import os
- import argparse
def http_down(project, down_list=None, proxy="", verify=False):
    """Download assets of the latest GitHub release of *project*.

    Args:
        project: ``owner/repo`` slug, e.g. ``"yisier/nps"``.
        down_list: iterable of asset file names to fetch; ``None`` or empty
            downloads nothing (only prints the latest tag). Was a mutable
            default ``[]`` before — fixed to ``None``.
        proxy: optional URL prefix (e.g. ``"https://ghproxy.com/"``)
            prepended to the release download URL.
        verify: verify TLS certificates. Defaults to ``False`` to keep the
            original behavior; pass ``True`` in trusted environments.

    Raises:
        requests.HTTPError: if the GitHub API request fails.
    """
    # Avoid the shared-mutable-default pitfall.
    down_list = list(down_list) if down_list else []
    git_api = "https://api.github.com/repos/{}/releases/latest".format(project)
    git_releases = "https://github.com/{}/releases/download/".format(project)
    print("获取", git_api)
    # SECURITY NOTE: verify=False disables certificate checking (original
    # behavior, kept as the default); callers should opt in to verify=True.
    r = requests.get(git_api, verify=verify)
    # Fail loudly on API errors instead of an opaque KeyError below.
    r.raise_for_status()
    latest_version = r.json()["tag_name"]
    print('Latest version:', latest_version)
    git_down_url = proxy + git_releases + latest_version + "/"
    print('git_down_url:', git_down_url)
    if down_list:
        # exist_ok avoids the check-then-create race of the original code.
        os.makedirs(latest_version, exist_ok=True)
        for asset in down_list:
            print('down {}...'.format(asset))
            urlretrieve(git_down_url + asset, os.path.join(latest_version, asset))
    print("下载完成")
if __name__ == "__main__":
    # CLI entry point: fetch the given assets from a project's latest release.
    parser = argparse.ArgumentParser()
    parser.add_argument("n", help="项目,譬如 yisier/nps")
    parser.add_argument("l", help="下载列表,譬如 windows_amd64_client.tar.gz,windows_amd64_server.tar.gz")
    # The real default lives in add_argument now, replacing the old
    # default=False + truthiness-check dance (also lets -p "" disable the proxy).
    parser.add_argument("-p", default="https://ghproxy.com/",
                        help="代理,譬如 https://ghproxy.com/")
    args = parser.parse_args()
    project = args.n
    # Fixed: dropped the useless chained alias `down_list = list_str = ...`.
    down_list = args.l.split(",")
    http_down(project, down_list, args.p)
|