# NOTE(review): removed scraped-page artifact ("code pull finished, page will auto-refresh") — not part of the script.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from sys import argv, exit
import re
from glob import glob
from urllib import urlopen, urlretrieve, unquote
from shutil import move
from os import remove, path
import argparse
# bs4 is the only third-party dependency; abort with install instructions when it is missing.
try:
    from bs4 import BeautifulSoup as Soup
except ImportError:
    Soup = None
    exit("""You need beautifulsoup4!
install it from https://www.crummy.com/software/BeautifulSoup/
or run pip install beautifulsoup4.""")
# CentOS point release whose repositories are mirrored.
centos_version = '7.7.1908'
# Aliyun mirror of the docker-ce stable rpm repository.
docker_repo = 'https://mirrors.aliyun.com/docker-ce/linux/centos/7/x86_64/stable/Packages/'
# GitHub redirects this URL to the tag page of the newest docker-compose release.
docker_compose_repo = 'https://github.com/docker/compose/releases/latest'
# Base URL of the CentOS mirror used for dependency rpms.
rep_mirror_url = 'http://mirrors.aliyun.com/centos'
package_store = 'os'
# Local directories where downloaded packages are stored (note trailing slashes).
package_local = {'common': './packages/common/',
                 'docker': './packages/'}
# Captures the version part of an el7 rpm file name (group 1), e.g. '-2.5-8.el7.x86_64.rpm'.
COMMON_VERSION_REX = r"-([\d\.\-]+)\.el7.*\.(x86_64|noarch)\.rpm"
# Captures the bare package name (group 1) in front of the version digits.
COMMON_GET_NAME_REX = r"([a-zA-Z\-]+)-\d+.+"
# Dry-run flag, set from the -t/--test CLI option in __main__.
_is_test = False
def _get_version_number(s):
m = re.search(r'\d+.+-\d', s)
return re.sub('[.-]', '', m.group(0))
def _get_file_name_by_url(url):
return url[url.rfind('/') + 1:]
def __gen_repository_dir(store_name='os'):
    """Build the mirror Packages/ directory URL for *store_name*.

    'base' is treated as an alias of the 'os' store.
    """
    effective_store = 'os' if store_name == 'base' else store_name
    return '%s/%s/%s/x86_64/Packages/' % (rep_mirror_url, centos_version, effective_store)
class __Rpm:
    """One RPM package to mirror: remote download info plus local on-disk state.

    set_url() records the newest remote package and its comparable version;
    set_existed_package_name() records the copy already on disk.
    has_new_package() then answers whether the remote one should be fetched.
    """

    def __init__(self, name, repository='base', arch='x86_64', version_rex=COMMON_VERSION_REX,
                 name_rex=COMMON_GET_NAME_REX):
        self.__name = name
        self.__repository = repository
        self.__arch = arch
        self.__version_rex = version_rex
        self.__get_name_rex = name_rex
        self.__url = None
        # 0 means "version unknown / not resolved yet".
        self.__current_version = 0
        self.__existed_package_name = None
        self.__existed_version = 0
        self.__existed_package_path = None

    @property
    def name(self):
        return self.__name

    @property
    def repository(self):
        return self.__repository

    @property
    def arch(self):
        return self.__arch

    @property
    def version_rex(self):
        return self.__version_rex

    @property
    def url(self):
        return self.__url

    def set_url(self, value):
        """Record the remote package URL and derive its comparable version."""
        self.__current_version = self.__get_version(self.version_rex, _get_file_name_by_url(value))
        self.__url = value

    @property
    def existed_package_name(self):
        return self.__existed_package_name

    def set_existed_package_name(self, value):
        """Record the locally existing file name; derive its version and path."""
        self.__existed_version = self.__get_version(self.version_rex, _get_file_name_by_url(value))
        self.__existed_package_name = value
        self.__existed_package_path = package_local['common'] + value

    @property
    def existed_package_path(self):
        return self.__existed_package_path

    @staticmethod
    def __get_version(rex, s):
        """Extract *rex*'s group(1) from *s* and squash it to an int (0 = no version)."""
        m = re.search(rex, s)
        if _is_test and (m is None or len(m.groups()) == 0):
            print("cant find version for: " + s)
        version_str = m.group(1) if m is not None and len(m.groups()) > 0 else 0
        return int(_get_version_number(version_str) if version_str != 0 else 0)

    @property
    def current_version(self):
        return self.__current_version

    @property
    def existed_version(self):
        return self.__existed_version

    def has_new_package(self):
        """True when a remote URL was resolved and it is newer than the local copy."""
        return self.url is not None and (
            self.existed_version == 0 or self.current_version > self.existed_version)

    def has_old_package(self):
        """True when a matching package file already exists locally."""
        return self.__existed_package_name is not None

    def to_string(self):
        """Human-readable dump used by dry-run (-t) diagnostics."""
        return """
name: %s
url: %s
version: %s
exist file: %s
old version: %s
has new: %s""" % (self.__name,
                  self.url if self.url is not None else '',
                  self.current_version,
                  # BUG FIX: the original tested the bound method object
                  # `self.has_old_package` (always truthy) instead of calling it,
                  # so the 'exist file' line printed 'None' for missing packages.
                  str(self.existed_package_path if self.has_old_package() else ''),
                  self.existed_version,
                  str(self.has_new_package()))
# Packages docker-ce depends on: (name, repository store, architecture).
__docker_require_packages = [
    __Rpm('audit-libs-python'),
    __Rpm('checkpolicy'),
    __Rpm('container-selinux', 'extras', 'noarch'),
    __Rpm('libcgroup'),
    __Rpm('libsemanage-python'),
    __Rpm('libtool-ltdl'),
    __Rpm('policycoreutils-python'),
    __Rpm('python-IPy', 'base', 'noarch'),
    __Rpm('setools-libs'),
]
# Bare package names, used to filter the repository listing links.
# (Comprehension replaces the original append loop.)
__docker_require_package_names = [o.name for o in __docker_require_packages]
# Cache of repository-listing hrefs keyed by store name, to avoid re-fetching.
__repo_url_dict = {}
def __gen_repository_soup(store_name='os'):
    """Fetch the repository directory listing for *store_name* and parse it."""
    listing_url = __gen_repository_dir(store_name)
    return Soup(urlopen(listing_url), "html.parser")
# Fetch every href in the store's listing that matches a required package.
def __get_repository_all_match_urls(store_name='os'):
    """Return the set of hrefs in *store_name*'s listing that look like one
    of the required docker-dependency rpms.

    Results are memoized in the module-level __repo_url_dict cache so each
    store's listing is fetched at most once per run.
    """
    if store_name in __repo_url_dict:
        return __repo_url_dict[store_name]
    package_repo_soup = __gen_repository_soup(store_name)

    def is_match_package_name(package_name, href_str):
        # Anchor on the package name and require an rpm for a supported arch.
        return re.match(package_name + r".+(x86_64|noarch)\.rpm", href_str)

    def is_required_package_link(s):
        # Generator form: any() short-circuits without building a list.
        return any(is_match_package_name(name, s)
                   for name in __docker_require_package_names)

    # NOTE(review): `text=` is the legacy bs4 keyword (newer bs4 prefers
    # `string=`); kept as-is for compatibility with the installed bs4.
    os_tags = package_repo_soup.find_all('a', text=is_required_package_link)
    hrefs = {tag.get('href') for tag in os_tags}
    __repo_url_dict[store_name] = hrefs
    return hrefs
def __get_package_url(rpm_):
    """Resolve the full download URL for *rpm_* from its repository listing.

    Returns the unquoted absolute URL of the first href matching the
    package's name+version pattern, or None when nothing matches.
    """
    repo_all_match_urls = __get_repository_all_match_urls(rpm_.repository)
    package_url_rex = rpm_.name + rpm_.version_rex
    package_url = None
    for href in repo_all_match_urls:
        if re.match(package_url_rex, href):
            package_url = unquote(__gen_repository_dir(rpm_.repository) + href)
            break
    if _is_test and package_url is None:
        # Fixed grammar of the original diagnostic ("cant found for").
        print('cant find package url for ' + package_url_rex)
    return package_url
# Resolve the download URLs of docker's dependency packages.
def get_download_docker_require_packages():
    """Resolve a download URL for every docker dependency package.

    Mutates each __Rpm in the module-level __docker_require_packages list
    via set_url() and returns that same list.  Packages whose URL cannot
    be resolved are reported but kept in the list (url stays None).
    """
    if _is_test:
        print('begin to find docker require packages')
    for package_info in __docker_require_packages:
        package_url = __get_package_url(package_info)
        if package_url is not None:
            package_info.set_url(package_url)
        else:
            print('cant find package for ' + package_info.name)
    return __docker_require_packages
def get_simplify_package_name(s):
    """Strip the trailing version: 'docker-ce-18.09...' -> 'docker-ce'.

    Raises AttributeError when *s* has no '<non-digits>-' prefix.
    """
    name_with_dash = re.search(r'([^\d]+)-', s).group(0)
    return name_with_dash[:-1]
def get_exist_common_files(packages):
    """Match already-downloaded rpm files against *packages*.

    Scans package_local['common'] for *.rpm files; when exactly one local
    file matches a package's name it is recorded on the package via
    set_existed_package_name().  Returns the same *packages* list.
    Raises RuntimeError when several local files match one package.
    """
    local_paths = glob(package_local['common'] + "*.rpm")
    names = [_get_file_name_by_url(p) for p in local_paths]
    for package in packages:
        exists = [s for s in names if package.is_name_match(s)]
        if len(exists) > 1:
            for s in exists:
                # BUG FIX: original wrote `% package.name, s` — '%' binds before
                # the comma, so print raised TypeError (two %s, one argument).
                print('get exist file %s: %s' % (package.name, s))
            raise RuntimeError('匹配到多个已存在的文件,请检查正则表达式')
        elif len(exists) == 1:
            package.set_existed_package_name(exists[0])
    return packages
# fileurl: download URL of the new package
# filename: file name to save it as
# filedir: directory the file is moved into
# old_filename: name of the obsolete file to delete from filedir
def update_local_package(fileurl, filename, filedir, old_filename=None, is_remove_old_file=True):
    """Download *fileurl* into the working dir as *filename*, move it into
    *filedir*, and optionally delete *old_filename* from *filedir*.

    NOTE(review): assumes *filedir* ends with a path separator — all callers
    pass package_local values that do; verify before reusing elsewhere.
    """
    # Clear a stale partial download left in the working directory.
    if path.isfile(filename):
        remove(filename)
    urlretrieve(fileurl, filename)
    # Remove any existing copy first so shutil.move cannot fail on the target.
    if path.isfile(filedir + filename):
        remove(filedir + filename)
    move(filename, filedir)
    if old_filename is not None and is_remove_old_file and path.isfile(filedir + old_filename):
        remove(filedir + old_filename)
def get_download_docker_packages_urls():
    """Scrape the docker-ce repository listing for docker-ce rpm hrefs."""
    listing = Soup(urlopen(docker_repo), "html.parser")
    anchors = listing.find_all('a', text=re.compile(r'docker-ce-\d+.+x86_64.rpm'))
    return {anchor.get('href') for anchor in anchors}
def clean_local_package_by_prefix(filedir, prefix, suffix='*.rpm'):
    """Delete every file in *filedir* matching glob *suffix* whose base name
    starts with *prefix*.

    *filedir* must end with a path separator (callers pass '.../' values).
    """
    for full_path in glob(filedir + suffix):
        # path.basename is portable; the original sliced on '/' which breaks
        # on platforms where glob returns backslash-separated paths.
        base_name = path.basename(full_path)
        if base_name.startswith(prefix):
            remove(filedir + base_name)
def update_docker_require_packages():
    """Download every docker dependency rpm that is newer than the local copy.

    Returns False when no packages could be resolved at all; otherwise None.
    In dry-run mode (_is_test) only prints what would happen.
    """
    packages = get_download_docker_require_packages()
    if not packages:
        return False
    packages = get_exist_common_files(packages)
    for package in packages:
        if _is_test:
            print(package.to_string())
            continue
        if package.has_new_package():
            print('update: ' + package.url)
            if package.existed_package_name is not None:
                print('delete: ' + package.existed_package_name)
            update_local_package(package.url, _get_file_name_by_url(package.url),
                                 package_local['common'], package.existed_package_name)
def update_docker():
    """Download the newest docker-ce and docker-ce-cli rpms if not already local.

    Returns False when the newest version is already on disk; otherwise None.
    """
    print('to update docker-ce...')
    hrefs = get_download_docker_packages_urls()

    def to_version(s):
        # Digits of the first 'NN.NN.NN-N'-style run, dots/dashes stripped.
        # BUG FIX: compare as int — the original compared digit *strings*,
        # which orders lexicographically (e.g. '9...' above '18...').
        # NOTE(review): concatenating digits is still fragile across versions
        # with different component widths; acceptable within one rpm series.
        m = re.search(r'\d+[^a-zA-Z]+', s)
        return int(re.sub('[.-]', '', m.group(0)))

    docker_ce_latest_package = None
    for href in hrefs:
        if docker_ce_latest_package is None or to_version(href) > to_version(docker_ce_latest_package):
            docker_ce_latest_package = href
    docker_ce_cli_latest_package = docker_ce_latest_package.replace('docker-ce-', 'docker-ce-cli-')
    if path.isfile(package_local['docker'] + docker_ce_latest_package):
        print('latest version %s was existed' % docker_ce_latest_package)
        return False
    clean_local_package_by_prefix(package_local['docker'], 'docker-ce-')
    docker_ce_url = docker_repo + docker_ce_latest_package
    docker_ce_cli_url = docker_repo + docker_ce_cli_latest_package
    # FIXME containerd.io is not auto-updated yet (shortcut taken 2019-10-11)
    # docker-ce
    update_local_package(docker_ce_url, docker_ce_latest_package, package_local['docker'])
    # docker-ce-cli
    update_local_package(docker_ce_cli_url, docker_ce_cli_latest_package, package_local['docker'])
def update_docker_compose():
    """Download the latest docker-compose Linux binary from GitHub if missing."""
    print('to update docker-compose...')
    download_pattern = 'https://github.com/docker/compose/releases/download/%s/docker-compose-Linux-x86_64'
    # /releases/latest redirects to /releases/tag/<version>; geturl() exposes it.
    resolved_url = urlopen(docker_compose_repo).geturl()
    latest_version = resolved_url[resolved_url.rindex("/") + 1:]
    filename = 'docker-compose-Linux-x86_64-' + latest_version
    if path.isfile(package_local['docker'] + filename):
        print('latest version %s was existed' % filename)
        return False
    clean_local_package_by_prefix(package_local['docker'], 'docker-compose-Linux-x86_64', '*')
    update_local_package(download_pattern % latest_version, filename, package_local['docker'])
# Script entry point: refresh dependency rpms, then docker-ce/cli, then docker-compose.
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    # -t/--test: dry-run mode — only print planned actions, download nothing.
    parser.add_argument('-t', '--test', action="store_true", help="测试离线包信息,只打印动作,没有任何实际信息")
    args, unknown = parser.parse_known_args()
    if args.test:
        _is_test = True
    update_docker_require_packages()
    # NOTE(review): source indentation was lost; the real downloads are grouped
    # under `if not _is_test` so that -t stays a pure dry run — confirm upstream.
    if not _is_test:
        print('update docker require packages was done.')
        update_docker()
        print('update docker-ce was done.')
        update_docker_compose()
        print('update docker-compose was done.')
# NOTE(review): removed trailing scraped-page moderation notice — hosting-site boilerplate, not part of the script.