Read the url in chunks so that we don't use as much memory for large packages

pull/18777/head
Toshio Kuratomi 9 years ago committed by Matt Clay
parent 2e7dcf3317
commit bd87c11c2a

@@ -152,6 +152,9 @@ EXAMPLES = '''
   yum: name="@Development tools" state=present
 '''
 
+# 64k. Number of bytes to read at a time when manually downloading pkgs via a url
+BUFSIZE = 65536
+
 def_qf = "%{name}-%{version}-%{release}.%{arch}"
 
 def log(msg):
@@ -526,9 +529,11 @@ def install(module, items, repoq, yum_basecmd, conf_file, en_repos, dis_repos):
             package = os.path.join(tempdir, str(pkg.rsplit('/', 1)[1]))
             try:
                 rsp, info = fetch_url(module, pkg)
-                data = rsp.read()
                 f = open(package, 'w')
-                f.write(data)
+                data = rsp.read(BUFSIZE)
+                while data:
+                    f.write(data)
+                    data = rsp.read(BUFSIZE)
                 f.close()
                 pkg = package
             except Exception, e:
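
The change is the classic read-in-chunks loop: ask the response for the next BUFSIZE bytes, write them to disk, and stop when the read returns an empty string, so a large RPM never has to sit in memory all at once. Below is a minimal standalone sketch of the same pattern; it uses the standard library's urllib.request as a stand-in for Ansible's fetch_url helper, and the URL, destination path, and function name are illustrative assumptions, not part of the module.

# Sketch of the chunked-download pattern from the diff (not the module's actual code).
# urllib.request stands in for Ansible's fetch_url; URL and path below are hypothetical.
import urllib.request

BUFSIZE = 65536  # 64 KiB per read, matching the buffer size added in the commit

def download_in_chunks(url, dest):
    # Stream the response to disk BUFSIZE bytes at a time instead of
    # holding the entire payload in memory with a single read().
    with urllib.request.urlopen(url) as rsp, open(dest, 'wb') as f:
        data = rsp.read(BUFSIZE)
        while data:
            f.write(data)
            data = rsp.read(BUFSIZE)

if __name__ == '__main__':
    # Hypothetical example URL and destination.
    download_in_chunks('https://example.com/some-package.rpm', '/tmp/some-package.rpm')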
