#!/usr/bin/env python
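"""Synchronize a portage tree with the grumpy database.

Fetches ebuild metadata through pkgcore, applies package move rules found in
profiles/updates, and mirrors categories, packages and ebuilds into the
SQLAlchemy models from grumpy.models.
"""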
import os, sys

from datetime import datetime
from fnmatch import fnmatch

from optparse import OptionParser

from pkgcore.config import load_config
from pkgcore.cache import metadata
from pkgcore.ebuild import repository

from snakeoil.fileutils import iter_read_bash

path = os.path.join(os.path.dirname(__file__), os.path.pardir)
sys.path.insert(0, path)
del path

from grumpy import app
from grumpy.models import (db, Category, Developer, Ebuild, Herd,
                           Package, Setting)

UPDATE_DB_KEY = 'updates_info'

def main(path):
    # pkgcore part to fetch all the ebuild information
    conf = load_config()
    eclass_cache = conf.eclass_cache['eclass stack']
    cache = metadata.database(readonly=True, location=path)
    repo = repository.UnconfiguredTree(path, cache=cache,
                                       eclass_cache=eclass_cache)

    def get_version(pkg, cpv):
        """Extract the version from an ebuild filename by stripping the
        leading "<pkg>-" prefix and the trailing ".ebuild" suffix."""
        return cpv[len(pkg)+1:-7]

    def package_move(src, dst):
        """Rename package and its ebuild in database."""
        raise NotImplementedError
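        # NOTE: package moves are not implemented yet; the code below the
        # raise is unreachable and only sketches the intended rename logic.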
        srccat, srcpkg = src.split('/')
        dstcat, dstpkg = dst.split('/')
        print "DEBUG: Handling package move: %s/%s -> %s/%s" % \
                (srccat, srcpkg, dstcat, dstpkg)
        package = Package.query.filter_by(cat=srccat) \
                               .filter_by(pkg=srcpkg).first()
        if package:
            package.rename(dstcat, dstpkg)

    # Read package move information
    movedir = os.path.join(path, 'profiles', 'updates')
    if not os.path.isdir(movedir):
        raise RuntimeError("missing package moves directory: '%s'" % movedir)

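    # Update files in profiles/updates are named like "1Q-2010"; sorting by
    # (year, quarter) leaves the most recent file last in the list.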
    get_key = lambda name: tuple(reversed(name.split('-')))
    move_files = sorted(os.listdir(movedir), key=get_key)
    update_file = move_files[-1]
    update_path = os.path.join(movedir, update_file)
    # Fetch existing setting data from database...
    prev_updates = Setting.query.filter_by(name=UPDATE_DB_KEY).first()
    if prev_updates:
        # Check for filename and mtime
        update_data = prev_updates.data
        if update_data['file'] == update_file:
            if update_data['mtime'] == int(os.stat(update_path).st_mtime):
                # update_file has not been changed, so fall through
                pass
            else:
                # Fetch new move rules
                for line in iter_read_bash(update_path):
                    line = line.split()
                    if line[0] == 'move' and \
                            line[1] not in update_data['moves'].keys():
                        package_move(line[1], line[2])
        # Handle new update files
        else:
            # Firstly, handle updates from current file...
            for line in iter_read_bash( \
                                os.path.join(movedir, update_data['file'])):
                line = line.split()
                if line[0] == 'move' and \
                        line[1] not in update_data['moves'].keys():
                    package_move(line[1], line[2])
            # ...and then apply all moves from the newer files
            for fname in move_files[move_files.index(update_data['file']) + 1:]:
                for line in iter_read_bash(os.path.join(movedir, fname)):
                    line = line.split()
                    if line[0] == 'move':
                        package_move(line[1], line[2])

    # Parse (again) latest moves file and store its info in database
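    # (the stored file name, mtime and move map are what the checks above
    # compare against on the next run)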
    if prev_updates:
        db.session.delete(prev_updates)
        db.session.flush()
    moves = {}
    for line in iter_read_bash(update_path):
        line = line.split()
        if line[0] == 'move':
            moves[line[1]] = line[2]
    data = dict(file=update_file, moves=moves, \
                mtime=int(os.stat(update_path).st_mtime))
    db.session.add(Setting(UPDATE_DB_KEY, data))
    db.session.commit()

    def package_sync(cat, pkg, files, mtime):
        """Update package information in database."""

        # Bail out early if package has no ebuilds
        if len(files) == 0:
            return

        # Fetch package from database
        p = Package.query.filter_by(category=cat,pkg=pkg).first()

        # Check whether package in database is up-to-date
        if p and p.mtime == datetime.fromtimestamp(mtime):
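            # Directory mtime matches the stored value; sanity-check that the
            # number of ebuilds on disk still agrees with the database.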
            assert len(p.ebuilds) == len(files)
            return

        print "DEBUG: updating package %s/%s" % (cat.name, pkg)

        # TODO: get rid of str(cat.name) after pkgcore learns about unicode
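        # Look up the pkgcore package object for every ebuild file on disk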
        ebuilds = [repo[(str(cat.name), pkg, get_version(pkg, f))] for f in files]

        # If package exists, remove outdated ebuilds
        if p:
            old = [p.ebuilds[e].cpv for e in p.ebuilds]
            new = [e.cpvstr for e in ebuilds]
            for ver in [item for item in old if item not in new]:
                db.session.delete(p.ebuilds[ver])
            # Sync package info
            cat.packages[p.key].sync(ebuilds[0], mtime)
        # otherwise, create package
        else:
            p = cat.packages[ebuilds[0].key] = Package(ebuilds[0], mtime)

        # Sync new versions of ebuilds
        for ebuild in ebuilds:
            if ebuild.cpvstr in p.ebuilds.keys():
                p.ebuilds[ebuild.cpvstr].sync(ebuild)
            else:
                p.ebuilds[ebuild.cpvstr] = Ebuild(ebuild)
        db.session.commit()

    # Compare list of categories in portage vs database
    old = [c.name for c in Category.query.all()]
    cats = repo.categories.keys()

    # Remove old categories
    for cat in [item for item in old if item not in cats]:
        c = Category.query.filter_by(name=cat).one()
        if Package.query.filter_by(category=c).count() > 0:
            # No packages should be left in a category that vanished from portage
            raise RuntimeError("category '%s' still has packages in database" % cat)
        db.session.delete(c)
    # Add new categories
    for cat in cats:
        if cat not in old:
            db.session.add(Category(cat))
    db.session.commit()

    # Traverse portage
    for cat in cats:
        catdir = os.path.join(path, cat)

        # Get list of existing packages in this category
        c = Category.query.filter_by(name=cat).one()
        old = [p.pkg for p in Package.query.filter_by(category=c).all()]
        new = [d for d in os.listdir(catdir) \
                       if os.path.isdir(os.path.join(catdir, d))]

        for pkg in [i for i in old if i not in new]:
            # Handle package deletion
            print "DEBUG: package has been removed:", pkg
            package = Package.query.filter_by(category=c, pkg=pkg).one()
            db.session.delete(package)
            db.session.commit()
        for pkg in new:
            pkgdir = os.path.join(catdir, pkg)
            files = [f for f in os.listdir(pkgdir) if fnmatch(f, '*.ebuild')]
            package_sync(c, pkg, files, int(os.stat(pkgdir).st_mtime))

if __name__ == '__main__':
    parser = OptionParser(usage="usage: %prog [options] CONFFILE")
    (opts, args) = parser.parse_args()
    if len(args) != 1:
        parser.error("provide path to configuration file as first argument")
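    # test_request_context() pushes a Flask application/request context so
    # app.config and the Flask-SQLAlchemy session are usable outside a request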
    with app.test_request_context():
        app.config.from_pyfile(args[0])
        if 'GRUMPY_PORTAGE_DIR' in app.config:
            portagedir = app.config['GRUMPY_PORTAGE_DIR']
            if os.path.isdir(portagedir):
                main(portagedir)