]>
git.ipfire.org Git - people/jschlag/pbs.git/blob - src/buildservice/repository.py
e842d69c4ac171fea27ff4503240f020741afded
6 log
= logging
.getLogger("repositories")
12 from .constants
import *
13 from .decorators
import *
class Repositories(base.Object):
    """Collection helper that loads Repository objects from the database."""
16 def _get_repository(self
, query
, *args
):
17 res
= self
.db
.get(query
, *args
)
20 return Repository(self
.backend
, res
.id, data
=res
)
22 def _get_repositories(self
, query
, *args
):
23 res
= self
.db
.query(query
, *args
)
26 yield Repository(self
.backend
, row
.id, data
=row
)
29 repositories
= self
._get
_repositories
("SELECT * FROM repositories \
30 WHERE deleted IS FALSE ORDER BY distro_id, name")
32 return iter(repositories
)
def create(self, distro, name, description):
    """Insert a new repository row for *distro* and return it wrapped as a
    Repository object.

    Uses ``RETURNING *`` so the freshly inserted row is fetched in the
    same statement, avoiding a second query.
    """
    return self._get_repository("INSERT INTO repositories(distro_id, name, description) \
        VALUES(%s, %s, %s) RETURNING *", distro.id, name, description)
def get_by_id(self, repo_id):
    """Fetch a single repository by primary key.

    The no-match behaviour is delegated to ``_get_repository`` —
    presumably it returns None when no row exists; confirm there.
    """
    return self._get_repository("SELECT * FROM repositories \
        WHERE id = %s", repo_id)
42 def get_history(self
, limit
=None, offset
=None, build
=None, repo
=None, user
=None):
43 query
= "SELECT * FROM repositories_history"
46 query
+= " ORDER BY time DESC"
50 query
+= " LIMIT %s,%s"
51 args
+= [offset
, limit
,]
57 for entry
in self
.db
.query(query
, *args
):
58 entry
= logs
.RepositoryLogEntry(self
.pakfire
, entry
)
65 Remasters all repositories
68 # Skip all repositories that don't need an update
69 if not repo
.needs_update
:
70 log
.debug("Repository %s does not need an update" % repo
)
73 with self
.db
.transaction():
class Repository(base.DataObject):
    """A single repository row from the database."""

    # Database table backing this DataObject.
    table = "repositories"
def __eq__(self, other):
    """Two repositories are equal when they share the same database id.

    Returns NotImplemented for foreign types so Python falls back to the
    reflected comparison (and ultimately identity) instead of the
    original implicit ``None``, which made ``a == b`` evaluate to None
    rather than a proper boolean.
    """
    if isinstance(other, self.__class__):
        return self.id == other.id

    # NOTE(review): defining __eq__ sets __hash__ to None in Python 3,
    # making instances unhashable — confirm repositories are never used
    # as dict keys or set members before relying on this class.
    return NotImplemented
def __lt__(self, other):
    # Ordering hook: appears to treat "self < other" as "*other* is the
    # parent of *self*" (self.parent_id == other.id) — TODO confirm this
    # is the intended sort order for repository chains.
    # NOTE(review): implicitly returns None both for foreign types and
    # for unrelated repositories; confirm callers only ever compare
    # related Repository instances.
    if isinstance(other, self.__class__):
        return self.parent_id == other.id
89 builds
= self
.backend
.builds
._get
_builds
("SELECT builds.* FROM repositories_builds \
90 LEFT JOIN builds ON repositories_builds.build_id = builds.id \
91 WHERE repositories_builds.repo_id = %s", self
.id)
96 res
= self
.db
.get("SELECT COUNT(*) AS len FROM repositories_builds \
97 WHERE repo_id = %s", self
.id)
103 return self
.backend
.repos
._get
_repository
("SELECT * FROM repositories \
104 WHERE parent_id = %s", self
.id)
108 if self
.data
.parent_id
:
109 return self
.backend
.repos
._get
_repository
("SELECT * FROM repositories \
110 WHERE id = %s", self
.data
.parent_id
)
114 return self
.backend
.distros
.get_by_id(self
.data
.distro_id
)
120 "distro" : self
.distro
.info
,
122 "arches" : self
.arches
,
128 self
.settings
.get("repository_baseurl", "http://pakfire.ipfire.org/repositories/"),
129 self
.distro
.identifier
,
137 def mirrorlist(self
):
139 self
.settings
.get("mirrorlist_baseurl", "https://pakfire.ipfire.org/"),
140 "distro", self
.distro
.identifier
,
141 "repo", self
.identifier
,
142 "mirrorlist?arch=%{arch}"
155 priority
= prioritymap
[self
.type]
160 "[repo:%s]" % self
.identifier
,
161 "description = %s - %s" % (self
.distro
.name
, self
.summary
),
163 "baseurl = %s" % self
.url
,
164 "mirrors = %s" % self
.mirrorlist
,
168 lines
.append("priority = %s" % priority
)
170 return "\n".join(lines
)
174 return self
.data
.name
def identifier(self):
    """Lower-cased repository name, used as its identifier."""
    repo_name = self.name
    return repo_name.lower()
182 return self
.data
.type
186 lines
= self
.description
.splitlines()
def description(self):
    """Free-text description from the database row; empty string when unset."""
    text = self.data.description
    if not text:
        return ""
    return text
199 return self
.data
.parent_id
203 if not self
.data
.key_id
:
206 return self
.pakfire
.keys
.get_by_id(self
.data
.key_id
)
210 return self
.distro
.arches
+ ["src"]
214 return self
.data
.mirrored
def set_enabled_for_builds(self, state):
    # Persist the flag through the generic DataObject attribute setter.
    self._set_attribute("enabled_for_builds", state)

# Read the flag straight off the row; write through the setter above.
enabled_for_builds = property(lambda s: s.data.enabled_for_builds, set_enabled_for_builds)
def score_needed(self):
    # Score threshold stored on the database row (likely wrapped by a
    # @property decorator just above — outside this view; confirm).
    return self.data.score_needed
227 return self
.data
.time_min
231 return self
.data
.time_max
233 def _log_build(self
, action
, build
, from_repo
=None, to_repo
=None, user
=None):
240 from_repo_id
= from_repo
.id
244 to_repo_id
= to_repo
.id
246 self
.db
.execute("INSERT INTO repositories_history(action, build_id, from_repo_id, to_repo_id, user_id, time) \
247 VALUES(%s, %s, %s, %s, %s, NOW())", action
, build
.id, from_repo_id
, to_repo_id
, user_id
)
249 def add_build(self
, build
, user
=None, log
=True):
250 self
.db
.execute("INSERT INTO repositories_builds(repo_id, build_id, time_added)"
251 " VALUES(%s, %s, NOW())", self
.id, build
.id)
254 build
._update
_bugs
_helper
(self
)
257 self
._log
_build
("added", build
, to_repo
=self
, user
=user
)
259 def rem_build(self
, build
, user
=None, log
=True):
260 self
.db
.execute("DELETE FROM repositories_builds \
261 WHERE repo_id = %s AND build_id = %s", self
.id, build
.id)
264 self
._log
_build
("removed", build
, from_repo
=self
, user
=user
)
266 def move_build(self
, build
, to_repo
, user
=None, log
=True):
267 self
.db
.execute("UPDATE repositories_builds SET repo_id = %s, time_added = NOW() \
268 WHERE repo_id = %s AND build_id = %s", to_repo
.id, self
.id, build
.id)
271 build
._update
_bugs
_helper
(to_repo
)
274 self
._log
_build
("moved", build
, from_repo
=self
, to_repo
=to_repo
,
277 def get_builds(self
, limit
=None, offset
=None):
278 query
= "SELECT build_id AS id FROM repositories_builds \
279 WHERE repo_id = %s ORDER BY time_added DESC"
284 query
+= " LIMIT %s,%s"
285 args
+= [offset
, limit
,]
291 for build
in self
.db
.query(query
, *args
):
292 build
= self
.pakfire
.builds
.get_by_id(build
.id)
295 _builds
.append(build
)
299 def _get_packages(self
, arch
):
300 if arch
.name
== "src":
301 pkgs
= self
.db
.query("SELECT packages.id AS id, packages.path AS path FROM packages \
302 JOIN builds ON builds.pkg_id = packages.id \
303 JOIN repositories_builds ON builds.id = repositories_builds.build_id \
304 WHERE packages.arch = %s AND repositories_builds.repo_id = %s",
308 pkgs
= self
.db
.query("SELECT packages.id AS id, packages.path AS path FROM packages \
309 JOIN jobs_packages ON jobs_packages.pkg_id = packages.id \
310 JOIN jobs ON jobs_packages.job_id = jobs.id \
311 JOIN builds ON builds.id = jobs.build_id \
312 JOIN repositories_builds ON builds.id = repositories_builds.build_id \
313 WHERE (jobs.arch = %s OR jobs.arch = %s) AND \
314 repositories_builds.repo_id = %s",
315 arch
.name
, "noarch", self
.id)
319 def get_packages(self
, arch
):
320 pkgs
= [self
.pakfire
.packages
.get_by_id(p
.id) for p
in self
._get
_packages
(arch
)]
325 def get_paths(self
, arch
):
326 paths
= [p
.path
for p
in self
._get
_packages
(arch
)]
333 return self
.get_packages()
def unpushed_builds(self):
    """Builds added to this repository after its last metadata update
    (``repositories.last_update``) — i.e. builds that presumably still
    need to be pushed out by the next remaster run.
    """
    return self.backend.builds._get_builds("SELECT builds.* FROM repositories \
        LEFT JOIN repositories_builds ON repositories.id = repositories_builds.repo_id \
        LEFT JOIN builds ON repositories_builds.build_id = builds.id \
        WHERE repositories.id = %s \
        AND repositories_builds.time_added >= repositories.last_update", self.id)
def get_obsolete_builds(self):
    # Delegate to the global builds collection, scoped to this repository.
    return self.pakfire.builds.get_obsolete(self)
347 def needs_update(self
):
348 if self
.unpushed_builds
:
354 self
.db
.execute("UPDATE repositories SET last_update = NOW() \
355 WHERE id = %s", self
.id)
358 log
.info("Going to update repository %s..." % self
.name
)
360 # Update the timestamp when we started at last.
363 for arch
in self
.arches
:
366 # Get all package paths that are to be included in this repository.
367 paths
= self
.get_paths(arch
)
369 repo_path
= os
.path
.join(
371 self
.distro
.identifier
,
376 if not os
.path
.exists(repo_path
):
377 os
.makedirs(repo_path
)
382 for filename
in os
.listdir(repo_path
):
383 path
= os
.path
.join(repo_path
, filename
)
385 if not os
.path
.isfile(path
):
388 remove_files
.append(path
)
391 filename
= os
.path
.basename(path
)
393 source_file
= os
.path
.join(PACKAGES_DIR
, path
)
394 target_file
= os
.path
.join(repo_path
, filename
)
396 # Do not add duplicate files twice.
397 if source_file
in source_files
:
400 source_files
.append(source_file
)
403 remove_files
.remove(target_file
)
410 # If nothing in the repository data has changed, there
413 log
.info("The repository has updates...")
415 log
.info("Nothing to update.")
418 # Find the key to sign the package.
421 key_id
= self
.key
.fingerprint
423 # Create package index.
424 p
= pakfire
.PakfireServer(arch
=arch
)
426 p
.repo_create(repo_path
, source_files
,
427 name
="%s - %s.%s" % (self
.distro
.name
, self
.name
, arch
),
430 # Remove files afterwards.
431 for file in remove_files
:
432 file = os
.path
.join(repo_path
, file)
437 log
.warning("Could not remove %s." % file)
439 def get_history(self
, **kwargs
):
444 return self
.pakfire
.repos
.get_history(**kwargs
)
446 def get_build_times(self
):
448 for arch
in self
.arches
:
449 time
= self
.db
.get("SELECT SUM(jobs.time_finished - jobs.time_started) AS time FROM jobs \
450 JOIN builds ON builds.id = jobs.build_id \
451 JOIN repositories_builds ON builds.id = repositories_builds.build_id \
452 WHERE (jobs.arch = %s OR jobs.arch = %s) AND \
453 jobs.type = 'build' AND \
454 repositories_builds.repo_id = %s", arch
, "noarch", self
.id)
456 times
.append((arch
, time
.time
.total_seconds()))
class RepositoryAux(base.DataObject):
    """Auxiliary repository row; see the ``repositories_aux`` table."""

    # Database table backing this DataObject.
    table = "repositories_aux"
466 return self
.data
.name
def description(self):
    # Description text from the row, normalised to "" when NULL/empty.
    return self.data.description or ""
def identifier(self):
    """Lower-cased repository name, used as its identifier."""
    lowered = self.name
    return lowered.lower()
482 return self
.pakfire
.distros
.get_by_id(self
.data
.distro_id
)
486 "[repo:%s]" % self
.identifier
,
487 "description = %s - %s" % (self
.distro
.name
, self
.name
),
489 "baseurl = %s" % self
.url
,
493 return "\n".join(lines
)