backend/uploads.py
#!/usr/bin/python

import datetime
import hashlib
import logging
import os
import pakfire.packages
import uuid

import base
import misc
import packages

from constants import *
class Uploads(base.Object):
	def get_by_uuid(self, _uuid):
		upload = self.db.get("SELECT id FROM uploads WHERE uuid = %s", _uuid)

		return Upload(self.pakfire, upload.id)

	def new(self, *args, **kwargs):
		return Upload.new(self.pakfire, *args, **kwargs)

	def get_all(self):
		uploads = self.db.query("SELECT id FROM uploads")

		return [Upload(self.pakfire, u.id) for u in uploads]

	def cleanup(self):
		for upload in self.get_all():
			upload.cleanup()


class Upload(base.Object):
	def __init__(self, pakfire, id):
		base.Object.__init__(self, pakfire)

		self.id = id
		self.data = self.db.get("SELECT * FROM uploads WHERE id = %s", self.id)

	@classmethod
	def new(cls, pakfire, builder, filename, size, hash):
		# Store the UUID as a string so the database layer does not need to
		# adapt uuid.UUID objects.
		_uuid = "%s" % uuid.uuid4()

		id = pakfire.db.execute("INSERT INTO uploads(uuid, builder, filename, size, hash)"
			" VALUES(%s, %s, %s, %s, %s)", _uuid, builder.id, filename, size, hash)

		upload = cls(pakfire, id)

		# Create the directory where we save the data.
		dirname = os.path.dirname(upload.path)
		if not os.path.exists(dirname):
			os.makedirs(dirname)

		# Create an empty file so append() can always open the path.
		with open(upload.path, "w"):
			pass

		return upload

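	# A minimal sketch of creating an upload (assuming a connected pakfire
	# object and a builder row; the filename and sizes are illustrative only):
	#
	#   upload = Upload.new(pakfire, builder, "example.pkg", size=12345,
	#           hash="...")
	#
	# The database row is created first, then the per-UUID directory and an
	# empty file, so the path exists before any data arrives.
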
	@property
	def uuid(self):
		return self.data.uuid

	@property
	def hash(self):
		return self.data.hash

	@property
	def filename(self):
		return self.data.filename

	@property
	def path(self):
		return os.path.join(UPLOADS_DIR, self.uuid, self.filename)

	@property
	def builder(self):
		return self.pakfire.builders.get_by_id(self.data.builder)

	def append(self, data):
		logging.debug("Writing %s bytes to %s" % (len(data), self.path))

		with open(self.path, "ab") as f:
			f.write(data)

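	# Sketch of the chunked transfer this enables (the chunking helper is a
	# hypothetical stand-in, not part of this codebase): a builder streams the
	# file piece by piece and each piece is appended to the same path.
	#
	#   for chunk in read_in_chunks(fileobj):  # hypothetical helper
	#       upload.append(chunk)
	#   if not upload.validate():
	#       upload.remove()
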
	def validate(self):
		# Calculate a hash to validate the upload.
		hash = misc.calc_hash1(self.path)

		ret = self.hash == hash

		if not ret:
			logging.error("Hash did not match: %s != %s" % (self.hash, hash))

		return ret

	def remove(self):
		# Remove the uploaded data.
		if os.path.exists(self.path):
			os.unlink(self.path)

		# Delete the upload from the database.
		self.db.execute("DELETE FROM uploads WHERE id = %s", self.id)

	@property
	def time_start(self):
		# cleanup() reads this as an attribute, so it must be a property.
		return self.data.time_start

	def commit(self, build):
		# Find out what kind of file this is.
		filetype = misc.guess_filetype(self.path)

		# If the filetype is unhandled, we remove the file and raise an
		# exception.
		if filetype == "unknown":
			self.remove()
			raise Exception("Cannot handle unknown file.")

		# If the file is a package, we open it and insert its information
		# into the database.
		if filetype == "pkg":
			logging.debug("%s is a package file." % self.path)
			pkg = pakfire.packages.open(None, None, self.path)

			if pkg.type == "source":
				packages.Package.new(self.pakfire, pkg, build)

			elif pkg.type == "binary":
				build.pkg.add_file(pkg, build)

		elif filetype == "log":
			build.add_log(self.path)

		# Finally, remove the upload.
		self.remove()

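	# Sketch of the commit step in context (the build object comes from
	# elsewhere in the backend; this only illustrates the call order):
	# validate the transferred data first, then hand the file to the build.
	#
	#   if upload.validate():
	#       upload.commit(build)
	#   else:
	#       upload.remove()
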
	def cleanup(self):
		# Determine for how many seconds this upload has been around.
		try:
			time_running = datetime.datetime.utcnow() - self.time_start
			time_running = time_running.total_seconds()
		except (TypeError, AttributeError):
			# time_start is not set; treat the upload as fresh.
			time_running = 0

		# Remove uploads that are older than 24 hours.
		if time_running >= 3600 * 24:
			self.remove()
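
# A hedged usage sketch for the periodic cleanup (how the backend schedules
# this is outside this module; the calls below are illustrative only):
#
#   uploads = Uploads(pakfire)
#   uploads.cleanup()  # removes every upload older than 24 hours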