]>
git.ipfire.org Git - people/jschlag/pbs.git/blob - src/buildservice/repository.py
8018cd161540a6b755816dd697fc08089f3b7aac
6 log
= logging
.getLogger("repositories")
12 from .constants
import *
13 from .decorators
import *
15 class Repositories(base
.Object
):
16 def _get_repository(self
, query
, *args
):
17 res
= self
.db
.get(query
, *args
)
20 return Repository(self
.backend
, res
.id, data
=res
)
22 def _get_repositories(self
, query
, *args
):
23 res
= self
.db
.query(query
, *args
)
26 yield Repository(self
.backend
, row
.id, data
=row
)
29 repositories
= self
._get
_repositories
("SELECT * FROM repositories \
30 WHERE deleted IS FALSE ORDER BY distro_id, name")
32 return iter(repositories
)
def create(self, distro, name, description):
	"""Insert a new repository row for *distro* and return it as an object.

	RETURNING * hands the freshly inserted row straight back, so the
	object can be built without a second database round-trip.
	"""
	query = "INSERT INTO repositories(distro_id, name, description) \
		VALUES(%s, %s, %s) RETURNING *"

	return self._get_repository(query, distro.id, name, description)
def get_by_id(self, repo_id):
	"""Fetch a single repository by its primary key."""
	query = "SELECT * FROM repositories \
		WHERE id = %s"

	return self._get_repository(query, repo_id)
42 def get_history(self
, limit
=None, offset
=None, build
=None, repo
=None, user
=None):
43 query
= "SELECT * FROM repositories_history"
46 query
+= " ORDER BY time DESC"
50 query
+= " LIMIT %s,%s"
51 args
+= [offset
, limit
,]
57 for entry
in self
.db
.query(query
, *args
):
58 entry
= logs
.RepositoryLogEntry(self
.pakfire
, entry
)
65 Remasters all repositories
68 # Skip all repositories that don't need an update
69 if not repo
.needs_update
:
70 log
.debug("Repository %s does not need an update" % repo
)
73 with self
.db
.transaction():
77 class Repository(base
.DataObject
):
78 table
= "repositories"
def __eq__(self, other):
	"""Two repositories are equal when they reference the same database row."""
	if isinstance(other, self.__class__):
		return self.id == other.id

	# Fix: the original fell off the end and implicitly returned None for
	# foreign operands. Returning NotImplemented lets Python try the
	# reflected comparison on the other object instead.
	return NotImplemented
def __lt__(self, other):
	"""Order a repository before the repository it descends from.

	NOTE(review): "less than" here means *other* is this repository's
	parent (self.parent_id == other.id) — presumably so children sort
	ahead of their parent; confirm against callers that sort repositories.
	"""
	if isinstance(other, self.__class__):
		return self.parent_id == other.id

	# Fix: avoid implicitly returning None for foreign operands; delegate
	# to the reflected comparison instead.
	return NotImplemented
89 builds
= self
.backend
.builds
._get
_builds
("SELECT builds.* FROM repositories_builds \
90 LEFT JOIN builds ON repositories_builds.build_id = builds.id \
91 WHERE repositories_builds.repo_id = %s", self
.id)
96 res
= self
.db
.get("SELECT COUNT(*) AS len FROM repositories_builds \
97 WHERE repo_id = %s", self
.id)
103 return self
.backend
.repos
._get
_repository
("SELECT * FROM repositories \
104 WHERE parent_id = %s", self
.id)
108 if self
.data
.parent_id
:
109 return self
.backend
.repos
._get
_repository
("SELECT * FROM repositories \
110 WHERE id = %s", self
.data
.parent_id
)
114 return self
.backend
.distros
.get_by_id(self
.data
.distro_id
)
def set_priority(self, priority):
	# Persist the new priority through the generic attribute setter.
	self._set_attribute("priority", priority)

# Reads come straight from the cached row; writes route through set_priority().
priority = property(lambda self: self.data.priority, set_priority)
125 "distro" : self
.distro
.info
,
127 "arches" : self
.arches
,
133 self
.settings
.get("repository_baseurl", "http://pakfire.ipfire.org/repositories/"),
134 self
.distro
.identifier
,
142 def mirrorlist(self
):
144 self
.settings
.get("mirrorlist_baseurl", "https://pakfire.ipfire.org/"),
145 "distro", self
.distro
.identifier
,
146 "repo", self
.identifier
,
147 "mirrorlist?arch=%{arch}"
154 "[repo:%s]" % self
.identifier
,
155 "description = %s - %s" % (self
.distro
.name
, self
.summary
),
157 "baseurl = %s" % self
.url
,
158 "mirrors = %s" % self
.mirrorlist
,
162 lines
.append("priority = %s" % self
.priority
)
164 return "\n".join(lines
)
168 return self
.data
.name
def identifier(self):
	"""Return the lower-cased repository name, used as its identifier."""
	name = self.name
	return name.lower()
176 return self
.data
.type
180 lines
= self
.description
.splitlines()
def description(self):
	"""Return the stored description, or an empty string when unset."""
	text = self.data.description
	return text if text else ""
193 return self
.data
.parent_id
197 if not self
.data
.key_id
:
200 return self
.pakfire
.keys
.get_by_id(self
.data
.key_id
)
204 return self
.distro
.arches
+ ["src"]
208 return self
.data
.mirrored
def set_enabled_for_builds(self, state):
	# Persist the flag through the generic attribute setter.
	self._set_attribute("enabled_for_builds", state)

# Reads come straight from the row; assignment routes through the setter above.
enabled_for_builds = property(
	lambda self: self.data.enabled_for_builds, set_enabled_for_builds)
def score_needed(self):
	"""Return the ``score_needed`` value stored on the repository row."""
	data = self.data
	return data.score_needed
221 return self
.data
.time_min
225 return self
.data
.time_max
227 def _log_build(self
, action
, build
, from_repo
=None, to_repo
=None, user
=None):
234 from_repo_id
= from_repo
.id
238 to_repo_id
= to_repo
.id
240 self
.db
.execute("INSERT INTO repositories_history(action, build_id, from_repo_id, to_repo_id, user_id, time) \
241 VALUES(%s, %s, %s, %s, %s, NOW())", action
, build
.id, from_repo_id
, to_repo_id
, user_id
)
243 def add_build(self
, build
, user
=None, log
=True):
244 self
.db
.execute("INSERT INTO repositories_builds(repo_id, build_id, time_added)"
245 " VALUES(%s, %s, NOW())", self
.id, build
.id)
248 build
._update
_bugs
_helper
(self
)
251 self
._log
_build
("added", build
, to_repo
=self
, user
=user
)
253 def rem_build(self
, build
, user
=None, log
=True):
254 self
.db
.execute("DELETE FROM repositories_builds \
255 WHERE repo_id = %s AND build_id = %s", self
.id, build
.id)
258 self
._log
_build
("removed", build
, from_repo
=self
, user
=user
)
260 def move_build(self
, build
, to_repo
, user
=None, log
=True):
261 self
.db
.execute("UPDATE repositories_builds SET repo_id = %s, time_added = NOW() \
262 WHERE repo_id = %s AND build_id = %s", to_repo
.id, self
.id, build
.id)
265 build
._update
_bugs
_helper
(to_repo
)
268 self
._log
_build
("moved", build
, from_repo
=self
, to_repo
=to_repo
,
271 def get_builds(self
, limit
=None, offset
=None):
272 query
= "SELECT build_id AS id FROM repositories_builds \
273 WHERE repo_id = %s ORDER BY time_added DESC"
278 query
+= " LIMIT %s,%s"
279 args
+= [offset
, limit
,]
285 for build
in self
.db
.query(query
, *args
):
286 build
= self
.pakfire
.builds
.get_by_id(build
.id)
289 _builds
.append(build
)
293 def _get_packages(self
, arch
):
294 if arch
.name
== "src":
295 pkgs
= self
.db
.query("SELECT packages.id AS id, packages.path AS path FROM packages \
296 JOIN builds ON builds.pkg_id = packages.id \
297 JOIN repositories_builds ON builds.id = repositories_builds.build_id \
298 WHERE packages.arch = %s AND repositories_builds.repo_id = %s",
302 pkgs
= self
.db
.query("SELECT packages.id AS id, packages.path AS path FROM packages \
303 JOIN jobs_packages ON jobs_packages.pkg_id = packages.id \
304 JOIN jobs ON jobs_packages.job_id = jobs.id \
305 JOIN builds ON builds.id = jobs.build_id \
306 JOIN repositories_builds ON builds.id = repositories_builds.build_id \
307 WHERE (jobs.arch = %s OR jobs.arch = %s) AND \
308 repositories_builds.repo_id = %s",
309 arch
.name
, "noarch", self
.id)
313 def get_packages(self
, arch
):
314 pkgs
= [self
.pakfire
.packages
.get_by_id(p
.id) for p
in self
._get
_packages
(arch
)]
319 def get_paths(self
, arch
):
320 paths
= [p
.path
for p
in self
._get
_packages
(arch
)]
327 return self
.get_packages()
def unpushed_builds(self):
	"""Return builds whose time_added is at or after this repository's
	last_update timestamp — i.e. not yet picked up by an update."""
	query = "SELECT builds.* FROM repositories \
		LEFT JOIN repositories_builds ON repositories.id = repositories_builds.repo_id \
		LEFT JOIN builds ON repositories_builds.build_id = builds.id \
		WHERE repositories.id = %s \
		AND repositories_builds.time_added >= repositories.last_update"

	return self.backend.builds._get_builds(query, self.id)
def get_obsolete_builds(self):
	"""Fetch the builds of this repository that are considered obsolete."""
	builds = self.pakfire.builds
	return builds.get_obsolete(self)
341 def needs_update(self
):
342 if self
.unpushed_builds
:
348 self
.db
.execute("UPDATE repositories SET last_update = NOW() \
349 WHERE id = %s", self
.id)
352 log
.info("Going to update repository %s..." % self
.name
)
354 # Update the timestamp when we started at last.
357 for arch
in self
.arches
:
360 # Get all package paths that are to be included in this repository.
361 paths
= self
.get_paths(arch
)
363 repo_path
= os
.path
.join(
365 self
.distro
.identifier
,
370 if not os
.path
.exists(repo_path
):
371 os
.makedirs(repo_path
)
376 for filename
in os
.listdir(repo_path
):
377 path
= os
.path
.join(repo_path
, filename
)
379 if not os
.path
.isfile(path
):
382 remove_files
.append(path
)
385 filename
= os
.path
.basename(path
)
387 source_file
= os
.path
.join(PACKAGES_DIR
, path
)
388 target_file
= os
.path
.join(repo_path
, filename
)
390 # Do not add duplicate files twice.
391 if source_file
in source_files
:
394 source_files
.append(source_file
)
397 remove_files
.remove(target_file
)
404 # If nothing in the repository data has changed, there
407 log
.info("The repository has updates...")
409 log
.info("Nothing to update.")
412 # Find the key to sign the package.
415 key_id
= self
.key
.fingerprint
417 # Create package index.
418 p
= pakfire
.PakfireServer(arch
=arch
)
420 p
.repo_create(repo_path
, source_files
,
421 name
="%s - %s.%s" % (self
.distro
.name
, self
.name
, arch
),
424 # Remove files afterwards.
425 for file in remove_files
:
426 file = os
.path
.join(repo_path
, file)
431 log
.warning("Could not remove %s." % file)
433 def get_history(self
, **kwargs
):
438 return self
.pakfire
.repos
.get_history(**kwargs
)
440 def get_build_times(self
):
442 for arch
in self
.arches
:
443 time
= self
.db
.get("SELECT SUM(jobs.time_finished - jobs.time_started) AS time FROM jobs \
444 JOIN builds ON builds.id = jobs.build_id \
445 JOIN repositories_builds ON builds.id = repositories_builds.build_id \
446 WHERE (jobs.arch = %s OR jobs.arch = %s) AND \
447 jobs.type = 'build' AND \
448 repositories_builds.repo_id = %s", arch
, "noarch", self
.id)
450 times
.append((arch
, time
.time
.total_seconds()))
455 class RepositoryAux(base
.DataObject
):
456 table
= "repositories_aux"
460 return self
.data
.name
def description(self):
	"""Return the description column, falling back to an empty string."""
	value = self.data.description
	if not value:
		return ""
	return value
def identifier(self):
	"""The repository identifier: simply the name in lower case."""
	return str.lower(self.name)
476 return self
.pakfire
.distros
.get_by_id(self
.data
.distro_id
)
480 "[repo:%s]" % self
.identifier
,
481 "description = %s - %s" % (self
.distro
.name
, self
.name
),
483 "baseurl = %s" % self
.url
,
487 return "\n".join(lines
)