# This file was scraped from git.ipfire.org (pakfire.git,
# python/pakfire/server.py); stray gitweb page markup removed.
###############################################################################
#                                                                             #
# Pakfire - The IPFire package management system                              #
# Copyright (C) 2011 Pakfire development team                                 #
#                                                                             #
# This program is free software: you can redistribute it and/or modify        #
# it under the terms of the GNU General Public License as published by        #
# the Free Software Foundation, either version 3 of the License, or           #
# (at your option) any later version.                                         #
#                                                                             #
# This program is distributed in the hope that it will be useful,             #
# but WITHOUT ANY WARRANTY; without even the implied warranty of              #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the                #
# GNU General Public License for more details.                                #
#                                                                             #
# You should have received a copy of the GNU General Public License           #
# along with this program. If not, see <http://www.gnu.org/licenses/>.        #
#                                                                             #
###############################################################################
# Project-local imports.
# NOTE(review): this scrape is incomplete — the stdlib imports used below
# (hashlib, logging, os, random, socket, subprocess, tempfile, xmlrpclib, ...)
# were elided from this view; confirm against the upstream file.
import pakfire.downloader
import pakfire.packages
import pakfire.repository

from pakfire.constants import *

# Size of one piece of a file upload sent over XML-RPC (see upload_file).
CHUNK_SIZE = 1024**2 # 1M
	def __init__(self, pakfire, id, name, url, path, targetpath, revision, branch):
		# Bind a server-described source repository (url/revision/branch)
		# to a local git working copy.
		# NOTE(review): this scrape is incomplete — the assignments for
		# id, name, url, path and branch (original lines 47-49/52-53)
		# and the clone/refresh call bodies (57-58, 60-61) are elided.
		self.pakfire = pakfire
		self.targetpath = targetpath
		self.revision = revision

		# If the repository is not yet checked out, we create a local clone
		# from it to work with it.
		if not self.is_cloned():
			# (elided: presumably self.clone() — TODO confirm upstream)

		# Always refresh the repository to have the recent commits.
		# (elided: the fetch/update call is missing from this scrape)
		# Body of is_cloned() — the "def is_cloned(self):" line itself
		# (original line ~62) is elided from this scrape.
		# True when the local working copy already exists on disk.
		return os.path.exists(self.path)
		# Body of clone() — the enclosing "def clone(self):" line
		# (original line ~65) is elided from this scrape.
		# Clone the remote repository into the parent directory of
		# self.path; git names the checkout after the basename.
		dirname = os.path.dirname(self.path)
		basename = os.path.basename(self.path)

		if not os.path.exists(dirname):
			# (elided: presumably os.makedirs(dirname) — TODO confirm)

		self._git("clone %s %s" % (self.url, basename), path=dirname)
		# Body of the path getter — the enclosing "def" line (original
		# line ~80, presumably a property) is elided from this scrape.
		# Derive a stable cache location from the SHA1 of the source URL.
		h = hashlib.sha1(self.url)

		# XXX path is to be changed
		return "/var/cache/pakfire/sources/%s" % h.hexdigest()
	def _git(self, cmd, path=None):
		# Run a git command inside |path| and return its stdout.
		# NOTE(review): original lines 88-90 are elided here; call sites
		# invoke this without |path|, so those lines presumably
		# defaulted path to self.path — TODO confirm upstream.
		#
		# SECURITY: the command line is built by string interpolation
		# and executed through "/bin/sh -c"; |path| and |cmd| must never
		# carry untrusted input (shell injection).
		cmd = "cd %s && git %s" % (path, cmd)

		logging.debug("Running command: %s" % cmd)

		return subprocess.check_output(["/bin/sh", "-c", cmd])
97 def _git_changed_files(self
, revision1
, revision2
=""):
98 files
= self
._git
("diff --name-only %s %s" % (revision1
, revision2
))
100 return [os
.path
.join(self
.path
, f
) for f
in files
.splitlines()]
102 def _git_checkout_revision(self
, revision
):
103 self
._git
("checkout %s" % revision
)
	def update_revision(self, revision, **pakfire_args):
		# Check out |revision| and dist all makefiles changed by the
		# most recent commit.
		# NOTE(review): the continuation of the final call (original
		# line ~115, presumably "**pakfire_args)") is elided from this
		# scrape.

		# Checkout the revision we want to work with.
		self._git_checkout_revision(revision)

		# Get list of all changes files between the current revision and
		# the one before it (HEAD^..HEAD).
		files = self._git_changed_files("HEAD^", "HEAD")

		# Update all changed files and return a repository with them.
		return self.update_files([f for f in files if f.endswith(".%s" % MAKEFILE_EXTENSION)],
	def update_files(self, files, **pakfire_args):
		# Dist the given makefiles into source packages inside a fresh
		# temporary directory and wrap the result in a source repository.
		# NOTE(review): this scrape is incomplete — the package
		# collection loop header (original lines ~120-122), parts of the
		# deleted-file branch (124, 126, 128, 131-134) and the
		# continuation of the repo_create call (144ff.) are elided.
		rnd = random.randint(0, 1024**2)
		tmpdir = "/tmp/pakfire-source-%s" % rnd

		if os.path.exists(file):
			# (elided: presumably pkgs.append(file); an else-branch for
			# deleted files follows — TODO confirm upstream)

			# XXX not sure what to do here

			# pkg_name = os.path.basename(os.path.dirname(file))

			# # Send deleted package to server.
			# self.master.package_remove(self, pkg_name)

		# XXX This totally ignores the local configuration.
		pakfire.api.dist(pkgs, resultdirs=[tmpdir,], **pakfire_args)

		# Create a kind of dummy repository to link the packages against it.
		# ("has_key" is Python 2 only.)
		if pakfire_args.has_key("build_id"):
			del pakfire_args["build_id"]
		pakfire_args["mode"] = "server"

		repo = pakfire.api.repo_create("source-%s" % rnd, [tmpdir,], type="source",
	def update_all(self):
		# Collect every makefile in the checkout and dist them all.
		# NOTE(review): the accumulator initialisation ("_files = []"),
		# the inner "for f in files:" and the "continue" (original lines
		# 149, 151, 153-154) are elided from this scrape.
		for dir, subdirs, files in os.walk(self.path):
			if not f.endswith(".%s" % MAKEFILE_EXTENSION):
				# (elided: presumably "continue")

			_files.append(os.path.join(dir, f))

		return self.update_files(_files)
class XMLRPCTransport(xmlrpclib.Transport):
	# xmlrpclib transport that identifies itself as pakfire and retries
	# requests on transient network errors (see single_request below).
	user_agent = "pakfire/%s" % PAKFIRE_VERSION
	def single_request(self, *args, **kwargs):
		# Perform one XML-RPC request, retrying on transient network
		# errors up to |tries| times.
		# NOTE(review): this scrape is incomplete — the retry-loop
		# skeleton (original lines ~168-170, presumably
		# "while tries: try:"), the re-raise branches and the sleep/
		# bookkeeping lines (180-181, 191-193, 195-196, 200-203) are
		# elided; the statements below are fragments of that loop.

		# Tries can be passed to this method.
		tries = kwargs.pop("tries", 100)

		ret = xmlrpclib.Transport.single_request(self, *args, **kwargs)

		except socket.error, e:
			# These kinds of errors are not fatal, but they can happen on
			# a bad internet connection or whatever.
			#
			#  32 presumably EPIPE (broken pipe) — TODO confirm
			# 110 Connection timeout
			# 111 Connection refused
			if not e.errno in (32, 110, 111,):
				# (elided: presumably re-raise for fatal errnos)

		except xmlrpclib.ProtocolError, e:
			# Log all XMLRPC protocol errors.
			logging.error("XMLRPC protocol error:")
			logging.error(" URL: %s" % e.url)
			logging.error(" HTTP headers:")
			for header in e.headers.items():
				logging.error(" %s: %s" % header)
			logging.error(" Error code: %s" % e.errcode)
			logging.error(" Error message: %s" % e.errmsg)
			# (elided: original lines 191-193)

		# If request was successful, we can break the loop.
		# (elided: presumably "return ret")

		# If the request was not successful, we wait a little time to try
		# it again.
		logging.debug("Request was not successful, we wait a little bit and try it again.")
		# (elided: sleep/retry bookkeeping, original lines ~200-203)

		logging.error("Maximum number of tries was reached. Giving up.")
		# XXX need better exception here.
		raise Exception, "Could not fulfill request."
class Server(object):
	# XML-RPC client used by a build slave to talk to the pakfire master
	# server (further methods of this class follow below).

	def __init__(self, **pakfire_args):
		# Read the slave configuration and open the RPC connection.
		# NOTE(review): the continuation of the ServerProxy call
		# (original line 220) is elided from this scrape.
		self.config = pakfire.config.Config()

		server = self.config._slave.get("server")

		logging.info("Establishing RPC connection to: %s" % server)

		self.conn = xmlrpclib.ServerProxy(server, transport=XMLRPCTransport(),
		# Body of the hostname getter — the enclosing "def" line and the
		# docstring delimiters (original lines ~223-226) are elided from
		# this scrape; the triple quotes below are restored.
		"""
			Return the host's name.
		"""
		return socket.gethostname()
	def update_info(self):
		# Collect host information (load average, CPU model, memory
		# size, supported arches) and push it to the master server.
		# NOTE(review): this scrape is incomplete — the cpuinfo
		# accumulator dict, the try/except around the split, and the
		# meminfo loop skeleton (original lines 238, 242-246, 248,
		# 252-254, 258, 260-262, 264-268) are elided.

		# Get the current load average.
		loadavg = ", ".join(["%.2f" % l for l in os.getloadavg()])

		# Get all supported architectures.
		arches = sorted([a for a in self.config.supported_arches])
		arches = " ".join(arches)

		# Determine CPU model
		with open("/proc/cpuinfo") as f:
			for line in f.readlines():
				# Break at an empty line, because all information after that
				# (elided: remainder of this comment and the break/try)
				key, value = line.split(":")
				# (elided: except-clause header, original line 248)
				pass # Skip invalid lines

				key, value = key.strip(), value.strip()
				# (elided: storing into the cpuinfo dict, lines 252-254)

		cpu_model = cpuinfo.get("model name", "Could not be determined")

		# Determine memory size
		with open("/proc/meminfo") as f:
			# (elided: lines 260-262)
			a, b, c = line.split()
			# (elided: computing |memory| from the MemTotal line)

		self.conn.update_host_info(loadavg, cpu_model, memory, arches)
	def upload_file(self, filename, build_id):
		# Upload |filename| to the server in CHUNK_SIZE pieces using the
		# XML-RPC upload-cookie protocol.
		# NOTE(review): this scrape is incomplete — the chunk-loop
		# skeleton (original lines ~287-293), the continuation of the
		# get_upload_cookie call (line 280, presumably passing size and
		# hash) and the "if not ret:" guard (305) are elided.
		# NB: local "hash" shadows the builtin of the same name.

		# Get the hash of the file.
		hash = pakfire.util.calc_hash1(filename)

		# Get the size of the file.
		size = os.path.getsize(filename)

		# Get an upload ID from the server.
		upload_id = self.conn.get_upload_cookie(os.path.basename(filename),

		# Calculate the number of chunks.
		# (Python 2 integer division; yields one extra chunk when size
		# is an exact multiple — presumably harmless, TODO confirm.)
		chunks = (size / CHUNK_SIZE) + 1

		# Cut the file in pieces and upload them one after another.
		with open(filename) as f:
			data = f.read(CHUNK_SIZE)

			logging.info("Uploading chunk %s/%s of %s." % (chunk, chunks,
				os.path.basename(filename)))

			data = xmlrpclib.Binary(data)
			self.conn.upload_chunk(upload_id, data)

		# Tell the server, that we finished the upload.
		ret = self.conn.finish_upload(upload_id, build_id)

		# If the server sends false, something happened with the upload that
		# could not be recovered.
		# (elided: presumably "if not ret:")
		raise Exception, "Upload failed."
	def update_build_status(self, build_id, status, message=""):
		# Report the state of build |build_id| to the master server.
		# NOTE(review): the condition guarding the raise (original line
		# 313, presumably "if not ret:") is elided from this scrape.
		ret = self.conn.update_build_state(build_id, status, message)

		# If the server returns False, then it did not acknowledge our status
		# update and the build has to be aborted.
		raise BuildAbortedException, "The build was aborted by the master server."
	def build_job(self, type=None):
		# Fetch one build job from the server, dispatch it to the
		# handler matching its type, and report the outcome.
		# NOTE(review): this scrape is incomplete — the "no job" early
		# return, the job_types dict delimiters and the try/except
		# skeletons (original lines 320-323, 326-327, 330-331, 333,
		# 339, 344-346, 351-353) are elided.
		build = self.conn.build_job() # XXX type=None

		# If the server has got no job for us, we end right here.
		# (elided: presumably "if not build: return")

			"binary" : self.build_binary_job,
			"source" : self.build_source_job,
		# (elided: closing brace of the job_types dict)

		build_id = build["id"]
		build_type = build["type"]

		func = job_types[build_type]
		# (elided: presumably "except KeyError:")
		raise Exception, "Build type not supported: %s" % type

		# Call the function that processes the build and try to catch general
		# exceptions and report them to the server.
		# If everything goes okay, we tell this the server, too.
		func(build_id, build)

		except DependencyError:
			# This has already been reported by func.
			# (elided: lines 344-346)

		# Format the exception and send it to the server.
		message = "%s: %s" % (e.__class__.__name__, e)

		self.update_build_status(build_id, "failed", message)

		self.update_build_status(build_id, "finished")
	def build_binary_job(self, build_id, build):
		# Download the source package named by the job, verify its
		# checksum, build it, and upload the results (and the log) back
		# to the server.
		# NOTE(review): this scrape is incomplete — the try/else/finally
		# skeleton around download/build/upload (original lines
		# ~369-370, 377, 391, 394, 401-403) is elided.
		filename = build["name"]
		download = build["download"]
		hash1 = build["hash1"]

		# Create a temporary file and a directory for the resulting files.
		tmpdir = tempfile.mkdtemp()
		tmpfile = os.path.join(tmpdir, filename)
		logfile = os.path.join(tmpdir, "build.log")

		# Get a package grabber and add mirror download capabilities to it.
		grabber = pakfire.downloader.PackageDownloader(self.config)

		# (elided: presumably "try:", original lines 369-370)

		# Download the source.
		grabber.urlgrab(download, filename=tmpfile)

		# Check if the download checksum matches.
		if pakfire.util.calc_hash1(tmpfile) == hash1:
			print "Checksum matches: %s" % hash1
		# (elided: presumably "else:", original line 377)
			raise DownloadError, "Download was corrupted"

		# Update the build status on the server.
		self.update_build_status(build_id, "running")

		pakfire.api.build(tmpfile, build_id=build_id,
			resultdirs=[tmpdir,], logfile=logfile)

		self.update_build_status(build_id, "uploading")

		# Walk through the result directory and upload all (binary) files.
		# Skip the logfile (uploaded separately below) and the source.
		for dir, subdirs, files in os.walk(tmpdir):
			# (elided: inner "for file in files:", original line 391)
			file = os.path.join(dir, file)
			if file in (logfile, tmpfile,):
				# (elided: presumably "continue")

			self.upload_file(file, build_id)

		except DependencyError, e:
			message = "%s: %s" % (e.__class__.__name__, e)
			self.update_build_status(build_id, "dependency_error", message)

		# (elided: presumably "finally:", lines 401-403)
		# Upload the logfile in any case and if it exists.
		if os.path.exists(logfile):
			self.upload_file(logfile, build_id)

		# Cleanup the files we created.
		pakfire.util.rm(tmpdir)
	def build_source_job(self, build_id, build):
		# Check out the requested source revision, dist it into source
		# packages, and upload every resulting package file.
		# NOTE(review): the loop header over the repository packages
		# (original lines ~418-421, presumably "for pkg in repo:") is
		# elided from this scrape.

		# Update the build status on the server.
		self.update_build_status(build_id, "running")

		source = Source(self, **build["source"])

		repo = source.update_revision(build["revision"], build_id=build_id)

		# Upload all files in the repository.
		path = os.path.join(pkg.repo.path, pkg.filename)
		self.upload_file(path, build_id)
	def update_repositories(self, limit=2):
		# Ask the server for up to |limit| repositories and regenerate
		# their on-disk metadata for every architecture.
		# NOTE(review): the outer loop over |repos| (original lines
		# 429-430, presumably "for repo in repos:") is elided from this
		# scrape.
		repos = self.conn.get_repos(limit)

		files = self.conn.get_repo_packages(repo["id"])

		for arch in repo["arches"]:
			path = "/pakfire/repositories/%s/%s/%s" % \
				(repo["distro"]["sname"], repo["name"], arch)

			pakfire.api.repo_create(path, files)