1 | #!/usr/bin/python | |
2 | ||
3 | import logging | |
4 | import os.path | |
5 | ||
# Module-level logger for everything repository-related.
log = logging.getLogger("repositories")
# Hand records up to the parent logger's handlers as well.
log.propagate = 1
8 | ||
9 | from . import base | |
10 | from . import logs | |
11 | ||
12 | from .constants import * | |
13 | from .decorators import * | |
14 | ||
class Repositories(base.Object):
	"""
		Collection interface for all repositories in the database.
	"""

	def _get_repository(self, query, *args):
		"""
			Runs *query* and wraps the single resulting row in a
			Repository object, or returns None when there is no row.
		"""
		res = self.db.get(query, *args)

		if res:
			return Repository(self.backend, res.id, data=res)

	def _get_repositories(self, query, *args):
		"""
			Runs *query* and yields a Repository object for every row.
		"""
		res = self.db.query(query, *args)

		for row in res:
			yield Repository(self.backend, row.id, data=row)

	def __iter__(self):
		# Iterates over all repositories that have not been deleted.
		repositories = self._get_repositories("SELECT * FROM repositories \
			WHERE deleted IS FALSE ORDER BY distro_id, name")

		return iter(repositories)

	def create(self, distro, name, description):
		"""
			Creates a new repository for *distro* and returns it.
		"""
		return self._get_repository("INSERT INTO repositories(distro_id, name, description) \
			VALUES(%s, %s, %s) RETURNING *", distro.id, name, description)

	def get_by_id(self, repo_id):
		return self._get_repository("SELECT * FROM repositories \
			WHERE id = %s", repo_id)

	def get_history(self, limit=None, offset=None, build=None, repo=None, user=None):
		"""
			Returns repository log entries, newest first.

			NOTE(review): the build/repo/user parameters are accepted but
			never applied to the query - confirm whether filtering was
			intended here.
		"""
		query = "SELECT * FROM repositories_history"
		args = []

		query += " ORDER BY time DESC"

		if limit:
			if offset:
				# Fixed: "LIMIT offset,limit" is MySQL-only syntax.
				# This schema uses PostgreSQL (see RETURNING/IS FALSE
				# elsewhere in this file), which requires OFFSET.
				query += " LIMIT %s OFFSET %s"
				args += [limit, offset,]
			else:
				query += " LIMIT %s"
				args += [limit,]

		entries = []
		for entry in self.db.query(query, *args):
			entry = logs.RepositoryLogEntry(self.pakfire, entry)
			entries.append(entry)

		return entries

	def remaster(self):
		"""
			Remasters all repositories
		"""
		for repo in self:
			# Skip all repositories that don't need an update
			if not repo.needs_update:
				log.debug("Repository %s does not need an update" % repo)
				continue

			with self.db.transaction():
				repo.remaster()
75 | ||
76 | ||
class Repository(base.DataObject):
	"""
		A single package repository (one row of the "repositories" table).
	"""

	table = "repositories"

	def __eq__(self, other):
		# Repositories are identified by their database ID.
		if isinstance(other, self.__class__):
			return self.id == other.id

	def __lt__(self, other):
		# A repository sorts before the repository it has as its parent
		# (builds migrate from child repositories towards the parent).
		if isinstance(other, self.__class__):
			return self.parent_id == other.id

	def __iter__(self):
		# Iterates over all builds that are currently in this repository.
		builds = self.backend.builds._get_builds("SELECT builds.* FROM repositories_builds \
			LEFT JOIN builds ON repositories_builds.build_id = builds.id \
			WHERE repositories_builds.repo_id = %s", self.id)

		return iter(builds)

	def __len__(self):
		# Number of builds in this repository.
		res = self.db.get("SELECT COUNT(*) AS len FROM repositories_builds \
			WHERE repo_id = %s", self.id)

		return res.len

	def __nonzero__(self):
		# Always truthy - without this, __len__ would make an empty
		# repository evaluate to False.
		return True

	@lazy_property
	def next(self):
		"""
			The repository that has this one as its parent (if any).
		"""
		return self.backend.repos._get_repository("SELECT * FROM repositories \
			WHERE parent_id = %s", self.id)

	@lazy_property
	def parent(self):
		"""
			The parent repository (if any).
		"""
		if self.data.parent_id:
			return self.backend.repos._get_repository("SELECT * FROM repositories \
				WHERE id = %s", self.data.parent_id)

	@lazy_property
	def distro(self):
		# The distribution this repository belongs to.
		return self.backend.distros.get_by_id(self.data.distro_id)

	def set_priority(self, priority):
		self._set_attribute("priority", priority)

	priority = property(lambda s: s.data.priority, set_priority)

	def get_user(self):
		# The user that owns this repository (if any).
		if self.data.user_id:
			return self.backend.users.get_by_id(self.data.user_id)

	def set_user(self, user):
		self._set_attribute("user_id", user.id)

	user = property(get_user, set_user)

	@property
	def info(self):
		# Summary dictionary for API/serialization consumers.
		return {
			"id"     : self.id,
			"distro" : self.distro.info,
			"name"   : self.name,
			"arches" : self.arches,
		}

	@property
	def basepath(self):
		# Relative path component "<distro>/<repo>".
		return "/".join((
			self.distro.identifier,
			self.identifier,
		))

	@property
	def path(self):
		# Local filesystem path; "%{arch}" is expanded by pakfire itself.
		return os.path.join(REPOS_DIR, self.basepath, "%{arch}")

	@property
	def url(self):
		return os.path.join(
			self.settings.get("baseurl", "https://pakfire.ipfire.org"),
			"repositories",
			self.basepath,
			"%{arch}"
		)

	@property
	def mirrorlist(self):
		return os.path.join(
			self.settings.get("baseurl", "https://pakfire.ipfire.org"),
			"distro", self.distro.identifier,
			"repo", self.identifier,
			"mirrorlist?arch=%{arch}"
		)

	def get_conf(self, local=False):
		"""
			Returns a pakfire repository configuration snippet.

			With local=True the filesystem path is used as baseurl and
			mirrors are omitted.
		"""
		lines = [
			"[repo:%s]" % self.identifier,
			"description = %s - %s" % (self.distro.name, self.summary),
			"enabled = 1",
			"baseurl = %s" % (self.path if local else self.url),
		]

		if self.mirrored and not local:
			lines.append("mirrors = %s" % self.mirrorlist)

		if self.priority:
			lines.append("priority = %s" % self.priority)

		return "\n".join(lines)

	@property
	def name(self):
		return self.data.name

	@property
	def identifier(self):
		# The identifier is simply the lower-cased name.
		return self.name.lower()

	@property
	def type(self):
		return self.data.type

	@property
	def summary(self):
		# First line of the description, or "N/A" when empty.
		lines = self.description.splitlines()

		if lines:
			return lines[0]

		return "N/A"

	@property
	def description(self):
		return self.data.description or ""

	@property
	def parent_id(self):
		return self.data.parent_id

	@lazy_property
	def key(self):
		"""
			The GPG key used to sign this repository (if any).
		"""
		if not self.data.key_id:
			return

		return self.pakfire.keys.get_by_id(self.data.key_id)

	@property
	def arches(self):
		# All binary arches of the distribution plus the source "arch".
		return self.distro.arches + ["src"]

	def set_mirrored(self, mirrored):
		self._set_attribute("mirrored", mirrored)

	mirrored = property(lambda s: s.data.mirrored, set_mirrored)

	def set_enabled_for_builds(self, state):
		self._set_attribute("enabled_for_builds", state)

	enabled_for_builds = property(lambda s: s.data.enabled_for_builds, set_enabled_for_builds)

	@property
	def score_needed(self):
		return self.data.score_needed

	@property
	def time_min(self):
		return self.data.time_min

	@property
	def time_max(self):
		return self.data.time_max

	def _log_build(self, action, build, from_repo=None, to_repo=None, user=None):
		"""
			Writes one entry to the repositories_history log table.
		"""
		user_id = None
		if user:
			user_id = user.id

		from_repo_id = None
		if from_repo:
			from_repo_id = from_repo.id

		to_repo_id = None
		if to_repo:
			to_repo_id = to_repo.id

		self.db.execute("INSERT INTO repositories_history(action, build_id, from_repo_id, to_repo_id, user_id, time) \
			VALUES(%s, %s, %s, %s, %s, NOW())", action, build.id, from_repo_id, to_repo_id, user_id)

	def add_build(self, build, user=None, log=True):
		"""
			Adds *build* to this repository and updates its bug status.
		"""
		self.db.execute("INSERT INTO repositories_builds(repo_id, build_id, time_added)"
			" VALUES(%s, %s, NOW())", self.id, build.id)

		# Update bug status.
		build._update_bugs_helper(self)

		if log:
			self._log_build("added", build, to_repo=self, user=user)

	def rem_build(self, build, user=None, log=True):
		"""
			Removes *build* from this repository.
		"""
		self.db.execute("DELETE FROM repositories_builds \
			WHERE repo_id = %s AND build_id = %s", self.id, build.id)

		if log:
			self._log_build("removed", build, from_repo=self, user=user)

	def move_build(self, build, to_repo, user=None, log=True):
		"""
			Moves *build* from this repository into *to_repo*.
		"""
		self.db.execute("UPDATE repositories_builds SET repo_id = %s, time_added = NOW() \
			WHERE repo_id = %s AND build_id = %s", to_repo.id, self.id, build.id)

		# Update bug status.
		build._update_bugs_helper(to_repo)

		if log:
			self._log_build("moved", build, from_repo=self, to_repo=to_repo,
				user=user)

	def get_builds(self, limit=None, offset=None):
		"""
			Returns the builds in this repository, most recently added first.
		"""
		query = "SELECT build_id AS id FROM repositories_builds \
			WHERE repo_id = %s ORDER BY time_added DESC"
		args = [self.id,]

		if limit:
			if offset:
				# Fixed: "LIMIT offset,limit" is MySQL-only syntax.
				# This schema uses PostgreSQL (see RETURNING/IS FALSE
				# elsewhere in this file), which requires OFFSET.
				query += " LIMIT %s OFFSET %s"
				args += [limit, offset,]
			else:
				query += " LIMIT %s"
				args += [limit,]

		_builds = []
		for build in self.db.query(query, *args):
			build = self.pakfire.builds.get_by_id(build.id)
			build._repo = self

			_builds.append(build)

		return _builds

	def _get_packages(self, arch):
		"""
			Returns raw rows (id, path) of all packages of *arch* in this
			repository.

			NOTE(review): this expects an arch object with a .name
			attribute, but remaster() iterates self.arches, which appends
			the plain string "src" - confirm what self.distro.arches
			actually yields.
		"""
		if arch.name == "src":
			# Source packages are attached to the build itself.
			pkgs = self.db.query("SELECT packages.id AS id, packages.path AS path FROM packages \
				JOIN builds ON builds.pkg_id = packages.id \
				JOIN repositories_builds ON builds.id = repositories_builds.build_id \
				WHERE packages.arch = %s AND repositories_builds.repo_id = %s",
				arch.name, self.id)

		else:
			# Binary packages come from the build jobs; noarch packages
			# are included for every architecture.
			pkgs = self.db.query("SELECT packages.id AS id, packages.path AS path FROM packages \
				JOIN jobs_packages ON jobs_packages.pkg_id = packages.id \
				JOIN jobs ON jobs_packages.job_id = jobs.id \
				JOIN builds ON builds.id = jobs.build_id \
				JOIN repositories_builds ON builds.id = repositories_builds.build_id \
				WHERE (jobs.arch = %s OR jobs.arch = %s) AND \
				repositories_builds.repo_id = %s",
				arch.name, "noarch", self.id)

		return pkgs

	def get_packages(self, arch):
		"""
			Returns sorted package objects of *arch* in this repository.
		"""
		pkgs = [self.pakfire.packages.get_by_id(p.id) for p in self._get_packages(arch)]
		pkgs.sort()

		return pkgs

	def get_paths(self, arch):
		"""
			Returns sorted package file paths of *arch* in this repository.
		"""
		paths = [p.path for p in self._get_packages(arch)]
		paths.sort()

		return paths

	@property
	def packages(self):
		# NOTE(review): get_packages() requires an "arch" argument, so
		# accessing this property raises TypeError - looks dead or broken.
		return self.get_packages()

	@property
	def unpushed_builds(self):
		"""
			Builds that were added after the repository was last remastered.
		"""
		return self.backend.builds._get_builds("SELECT builds.* FROM repositories \
			LEFT JOIN repositories_builds ON repositories.id = repositories_builds.repo_id \
			LEFT JOIN builds ON repositories_builds.build_id = builds.id \
			WHERE repositories.id = %s \
			AND repositories_builds.time_added >= repositories.last_update", self.id)

	def get_obsolete_builds(self):
		return self.pakfire.builds.get_obsolete(self)

	@property
	def needs_update(self):
		# The repository needs remastering when there are unpushed builds.
		if self.unpushed_builds:
			return True

		return False

	def updated(self):
		"""
			Stores the current time as the last update of this repository.
		"""
		self.db.execute("UPDATE repositories SET last_update = NOW() \
			WHERE id = %s", self.id)

	def remaster(self):
		"""
			Regenerates the on-disk repository metadata for all arches.
		"""
		log.info("Going to update repository %s..." % self.name)

		# Update the timestamp when we started at last.
		self.updated()

		for arch in self.arches:
			changed = False

			# Get all package paths that are to be included in this repository.
			paths = self.get_paths(arch)

			repo_path = os.path.join(
				REPOS_DIR,
				self.distro.identifier,
				self.identifier,
				arch
			)

			if not os.path.exists(repo_path):
				os.makedirs(repo_path)

			source_files = []
			remove_files = []

			# Everything currently on disk is a removal candidate; files
			# that are still wanted are taken off the list again below.
			for filename in os.listdir(repo_path):
				path = os.path.join(repo_path, filename)

				if not os.path.isfile(path):
					continue

				remove_files.append(path)

			for path in paths:
				filename = os.path.basename(path)

				source_file = os.path.join(PACKAGES_DIR, path)
				target_file = os.path.join(repo_path, filename)

				# Do not add duplicate files twice.
				if source_file in source_files:
					continue

				source_files.append(source_file)

				try:
					remove_files.remove(target_file)
				except ValueError:
					changed = True

			if remove_files:
				changed = True

			# If nothing in the repository data has changed, there
			# is nothing to do.
			if changed:
				log.info("The repository has updates...")
			else:
				log.info("Nothing to update.")
				continue

			# Find the key to sign the package.
			# Fixed: was "if repo.key:" - "repo" is not defined in this
			# scope and raised NameError whenever an update was needed.
			key_id = None
			if self.key:
				key_id = self.key.fingerprint

			# Create package index.
			# NOTE(review): "pakfire" is not imported at the top of this
			# file - confirm the PakfireServer import exists elsewhere.
			p = pakfire.PakfireServer(arch=arch)

			p.repo_create(repo_path, source_files,
				name="%s - %s.%s" % (self.distro.name, self.name, arch),
				key_id=key_id)

			# Remove files afterwards. The entries already are full paths
			# (fixed: they were joined with repo_path a second time, which
			# would corrupt them if REPOS_DIR were a relative path).
			for file in remove_files:
				try:
					os.remove(file)
				except OSError:
					log.warning("Could not remove %s." % file)

	def get_history(self, **kwargs):
		"""
			Returns the history log entries for this repository.
		"""
		kwargs.update({
			"repo" : self,
		})

		return self.pakfire.repos.get_history(**kwargs)

	def get_build_times(self):
		"""
			Returns a list of (arch, total build time in seconds) tuples.
		"""
		times = []
		for arch in self.arches:
			time = self.db.get("SELECT SUM(jobs.time_finished - jobs.time_started) AS time FROM jobs \
				JOIN builds ON builds.id = jobs.build_id \
				JOIN repositories_builds ON builds.id = repositories_builds.build_id \
				WHERE (jobs.arch = %s OR jobs.arch = %s) AND \
				jobs.type = 'build' AND \
				repositories_builds.repo_id = %s", arch, "noarch", self.id)

			# SUM() yields NULL when no jobs matched; count that as zero
			# instead of crashing on None.total_seconds().
			times.append((arch, time.time.total_seconds() if time.time else 0))

		return times
475 | ||
476 | ||
class RepositoryAux(base.DataObject):
	"""
		An auxiliary (externally hosted) repository, backed by the
		"repositories_aux" table.
	"""

	table = "repositories_aux"

	@property
	def name(self):
		return self.data.name

	@property
	def identifier(self):
		# The identifier is simply the lower-cased name.
		return self.name.lower()

	@property
	def description(self):
		# Never returns None - an unset description becomes "".
		return self.data.description or ""

	@property
	def url(self):
		return self.data.url

	@property
	def distro(self):
		# The distribution this auxiliary repository belongs to.
		return self.pakfire.distros.get_by_id(self.data.distro_id)

	def get_conf(self, local=False):
		"""
			Returns a pakfire configuration snippet for this repository.

			Auxiliary repositories always use their stored URL and the
			lowest priority; *local* is accepted for interface parity
			but has no effect.
		"""
		conf = [
			"[repo:%s]" % self.identifier,
			"description = %s - %s" % (self.distro.name, self.name),
			"enabled = 1",
			"baseurl = %s" % self.url,
			"priority = 0",
		]

		return "\n".join(conf)